From 8f52bbca70b79121640c9974074a3c68f467e529 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Fri, 24 Feb 2023 19:31:39 +0000
Subject: [PATCH 001/162] Bump actions/upload-artifact from 2 to 3

Bumps [actions/upload-artifact](https://github.com/actions/upload-artifact) from 2 to 3.
- [Release notes](https://github.com/actions/upload-artifact/releases)
- [Commits](https://github.com/actions/upload-artifact/compare/v2...v3)

---
updated-dependencies:
- dependency-name: actions/upload-artifact
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot]
---
 .github/workflows/backend_tests.yml  | 6 +++---
 .github/workflows/frontend_tests.yml | 6 +++---
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/.github/workflows/backend_tests.yml b/.github/workflows/backend_tests.yml
index 12e50334..22965a1a 100644
--- a/.github/workflows/backend_tests.yml
+++ b/.github/workflows/backend_tests.yml
@@ -140,7 +140,7 @@ jobs:
       - name: Upload coverage data
         # pin to upload coverage from only one matrix entry, otherwise coverage gets confused later
         if: always() && matrix.session == 'tests' && matrix.python == '3.11' && matrix.os == 'ubuntu-latest' && matrix.database == 'mysql'
-        uses: "actions/upload-artifact@v3.0.0"
+        uses: "actions/upload-artifact@v3"
         # this action doesn't seem to respect working-directory so include working-directory value in path
         with:
           name: coverage-data
@@ -148,14 +148,14 @@ jobs:
       - name: Upload documentation
         if: matrix.session == 'docs-build'
-        uses: actions/upload-artifact@v3.0.0
+        uses: actions/upload-artifact@v3
         with:
           name: docs
           path: docs/_build
       - name: Upload logs
         if: failure() && matrix.session == 'tests'
-        uses: "actions/upload-artifact@v3.0.0"
+        uses: "actions/upload-artifact@v3"
         with:
           name: logs-${{matrix.python}}-${{matrix.os}}-${{matrix.database}}
           path: "./log/*.log"
diff --git a/.github/workflows/frontend_tests.yml b/.github/workflows/frontend_tests.yml
index 405b359c..4e80311b 100644
--- a/.github/workflows/frontend_tests.yml
+++ b/.github/workflows/frontend_tests.yml
@@ -108,21 +108,21 @@ jobs:
         run: ./bin/get_logs_from_docker_compose >./log/docker_compose.log
       - name: Upload logs
         if: failure()
-        uses: "actions/upload-artifact@v3.0.0"
+        uses: "actions/upload-artifact@v3"
         with:
           name: spiffworkflow-backend-logs
           path: "./spiffworkflow-backend/log/*.log"
       # https://github.com/cypress-io/github-action#artifacts
       - name: upload_screenshots
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v3
        if: failure()
        with:
          name: cypress-screenshots
          path: ./spiffworkflow-frontend/cypress/screenshots
      # Test run video was always captured, so this action uses "always()" condition
      - name: upload_videos
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v3
         if: failure()
         with:
           name: cypress-videos
           path:

From f0df36c8620b33cd8b650b2cf8322a52c063b34e Mon Sep 17 00:00:00 2001
From: jasquat
Date: Wed, 8 Mar 2023 13:06:25 -0500
Subject: [PATCH 002/162] initial changes to remove loop reset with spiff w/
 burnettk

---
 spiffworkflow-backend/poetry.lock          |  6 +--
 spiffworkflow-backend/pyproject.toml       |  2 +-
 .../src/spiffworkflow_backend/api.yml      | 12 ++++++
 .../routes/process_instances_controller.py | 38 ++++++++++++++++---
 .../src/routes/ProcessInstanceShow.tsx     |  2 +-
 5 files changed, 50 insertions(+), 10 deletions(-)

diff --git a/spiffworkflow-backend/poetry.lock b/spiffworkflow-backend/poetry.lock
index c95a3a95..338f96dd 100644
--- a/spiffworkflow-backend/poetry.lock
+++ b/spiffworkflow-backend/poetry.lock
@@ -1854,8 +1854,8 @@ lxml = "*"
 [package.source]
 type = "git"
 url = "https://github.com/sartography/SpiffWorkflow"
-reference = "main"
-resolved_reference = "bee868d38b2c3da680c7a96b6a634d16b90d5861"
+reference = "feature/remove-loop-reset"
+resolved_reference = "13034aaf12f62aa3914744ca05bc9a3e3b3c3452"

 [[package]]
 name = "SQLAlchemy"
@@ -2234,7 +2234,7 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools"
 [metadata]
 lock-version = "1.1"
 python-versions = ">=3.9,<3.12"
-content-hash = "2fd5138221eabec441b601bb3769be478bed42099e72e20f7b8aaa1c1a888909"
+content-hash = "eac3b5aa78efea376a9e23e32f9e6853cc22c17a2a21b41e30800cb7c807d017"

 [metadata.files]
 alabaster = [
diff --git a/spiffworkflow-backend/pyproject.toml b/spiffworkflow-backend/pyproject.toml
index 4f47921b..fbaf1127 100644
--- a/spiffworkflow-backend/pyproject.toml
+++ b/spiffworkflow-backend/pyproject.toml
@@ -27,7 +27,7 @@ flask-marshmallow = "*"
 flask-migrate = "*"
 flask-restful = "*"
 werkzeug = "*"
-SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"}
+SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "feature/remove-loop-reset"}
 # SpiffWorkflow = {develop = true, path = "../SpiffWorkflow" }
 sentry-sdk = "^1.10"
 sphinx-autoapi = "^2.0"
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml
index bc5e4f0b..963ef12b 100755
--- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml
@@ -889,6 +889,12 @@ paths:
         description: If set will return the tasks as they were during a specific step of execution.
         schema:
           type: integer
+      - name: most_recent_tasks_only
+        in: query
+        required: false
+        description: If true, this will return only the most recent tasks.
+        schema:
+          type: boolean
     get:
       tags:
         - Process Instances
@@ -936,6 +942,12 @@ paths:
         description: If set will return the tasks as they were during a specific step of execution.
         schema:
           type: integer
+      - name: most_recent_tasks_only
+        in: query
+        required: false
+        description: If true, this will return only the most recent tasks.
+        schema:
+          type: boolean
     get:
       tags:
         - Process Instances
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py
index 97d7f632..634bf0ae 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py
@@ -1,5 +1,6 @@
 """APIs for dealing with process groups, process models, and process instances."""
 import json
+from uuid import UUID
 from typing import Any
 from typing import Dict
 from typing import Optional
@@ -505,6 +506,7 @@ def process_instance_task_list_without_task_data_for_me(
     process_instance_id: int,
     all_tasks: bool = False,
     spiff_step: int = 0,
+    most_recent_tasks_only: bool = False,
 ) -> flask.wrappers.Response:
     """Process_instance_task_list_without_task_data_for_me."""
     process_instance = _find_process_instance_for_me_or_raise(process_instance_id)
@@ -513,6 +515,7 @@
         process_instance,
         all_tasks,
         spiff_step,
+        most_recent_tasks_only,
     )
@@ -521,6 +524,7 @@ def process_instance_task_list_without_task_data(
     process_instance_id: int,
     all_tasks: bool = False,
     spiff_step: int = 0,
+    most_recent_tasks_only: bool = False,
 ) -> flask.wrappers.Response:
     """Process_instance_task_list_without_task_data."""
     process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
@@ -529,6 +533,7 @@
         process_instance,
         all_tasks,
         spiff_step,
+        most_recent_tasks_only,
     )
@@ -561,6 +566,8 @@ def process_instance_task_list(
     subprocess_state_overrides = {}
     for step_detail in step_details:
+        # if step_detail.bpmn_task_identifier == 'Activity_0iajzy6':
+        #     print(f"step_detail: {step_detail}")
         if step_detail.task_id in tasks:
             tasks[step_detail.task_id]["state"] = Task.task_state_name_to_int(
                 step_detail.task_state
             )
@@ -583,20 +590,39 @@ def process_instance_task_list(
     for spiff_task_id in tasks:
         if spiff_task_id not in steps_by_id:
+            # if tasks[spiff_task_id]['task_spec'] == 'Activity_0iajzy6':
+            #     print(f"tasks[spiff_task_id]: {tasks[spiff_task_id]}")
             tasks[spiff_task_id]["data"] = {}
+            state_to_set = TaskState.FUTURE
+            if tasks[spiff_task_id]["state"] == TaskState.LIKELY:
+                # print("WE HERE")
+                previous_completed_steps_for_bpmn_task_identifier = [s for s in step_details if s.bpmn_task_identifier == tasks[spiff_task_id]['task_spec'] and s.task_state == "COMPLETED"]
+                # previous_completed_steps_for_bpmn_task_identifier = [s for s in step_details if s.task_state == "COMPLETED"]
+                # print(f"previous_completed_steps_for_bpmn_task_identifier: {previous_completed_steps_for_bpmn_task_identifier}")
+                if len(previous_completed_steps_for_bpmn_task_identifier) > 0:
+                    state_to_set = TaskState.COMPLETED
             tasks[spiff_task_id]["state"] = subprocess_state_overrides.get(
-                spiff_task_id, TaskState.FUTURE
+                spiff_task_id, state_to_set
             )

     bpmn_process_instance = ProcessInstanceProcessor._serializer.workflow_from_dict(
         full_bpmn_process_dict
     )
-    spiff_task = processor.__class__.get_task_by_bpmn_identifier(
-        step_details[-1].bpmn_task_identifier, bpmn_process_instance
-    )
-    if spiff_task is not None and spiff_task.state != TaskState.READY:
+    last_step_detail_bpmn_task_identifier = step_details[-1].task_id
+    print(f"last_step_detail_bpmn_task_identifier: {last_step_detail_bpmn_task_identifier}")
+    uuid = UUID(last_step_detail_bpmn_task_identifier)
+    spiff_task = processor.bpmn_process_instance.get_task(uuid)
+    print(f"spiff_task: {spiff_task}")
+    # # workflow.complete_task_from_id(uuid)
+    # # spiff_task = processor.__class__.get_task_by_bpmn_identifier(
+    # #     last_step_detail_bpmn_task_identifier, bpmn_process_instance
+    # # )
+    if spiff_task is not None: #and spiff_task.state != TaskState.READY:
+        print("HEY WE HERE")
+        print(f"spiff_task: {spiff_task}")
         spiff_task.complete()
+        print(f"spiff_task2: {spiff_task}")

     spiff_tasks = None
     if all_tasks:
@@ -619,6 +645,8 @@
     spiff_tasks_by_process_id_and_task_name: dict[str, SpiffTask] = {}
     for spiff_task in spiff_tasks:
         row_id = f"{spiff_task.task_spec._wf_spec.name}:{spiff_task.task_spec.name}"
+        # if spiff_task.task_spec.name == 'Activity_0iajzy6' or spiff_task.task_spec.name == 'Activity_0pv92j7':
+        #     print(f"spiff_task: {spiff_task} - {spiff_task.id}")
         if (
             row_id not in spiff_tasks_by_process_id_and_task_name
             or spiff_task.last_state_change
diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx
index bfe543d9..e9da2273 100644
--- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx
+++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx
@@ -145,7 +145,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
       path: `${apiPath}/${modifiedProcessModelId}/${params.process_instance_id}${queryParams}`,
       successCallback: setProcessInstance,
     });
-    let taskParams = '?all_tasks=true';
+    let taskParams = '?all_tasks=true&most_recent_tasks_only=true';
     if (typeof params.spiff_step !== 'undefined') {
       taskParams = `${taskParams}&spiff_step=${params.spiff_step}`;
     }

From 25e4edb9431a1787730ba6366f845aacad87affa Mon Sep 17 00:00:00 2001
From: Elizabeth Esswein
Date: Thu, 9 Mar 2023 18:10:05 -0500
Subject: [PATCH 003/162] update process instance task list

---
 .../routes/process_instances_controller.py | 100 ++++++------------
 1 file changed, 32 insertions(+), 68 deletions(-)

diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py
index 9608a705..77ceff5a 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py
@@ -1,10 +1,10 @@
 """APIs for dealing with process groups, process models, and process instances."""
 import base64
 import json
-from uuid import UUID
 from typing import Any
 from typing import Dict
 from typing import Optional
+from uuid import UUID

 import flask.wrappers
 from flask import current_app
@@ -15,9 +15,6 @@ from flask import request
 from flask.wrappers import Response
 from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
 from SpiffWorkflow.task import TaskState
-from sqlalchemy import and_
-from sqlalchemy import or_
-
 from spiffworkflow_backend.exceptions.api_error import ApiError
 from spiffworkflow_backend.models.db import db
 from spiffworkflow_backend.models.human_task import HumanTaskModel
@@ -67,6 +64,8 @@ from spiffworkflow_backend.services.process_instance_service import (
 )
 from spiffworkflow_backend.services.process_model_service import ProcessModelService
 from spiffworkflow_backend.services.spec_file_service import SpecFileService
+from sqlalchemy import and_
+from sqlalchemy import or_


 def process_instance_create(
@@ -576,71 +575,33 @@
     processor = ProcessInstanceProcessor(process_instance)
     full_bpmn_process_dict = processor.full_bpmn_process_dict
-
     tasks = full_bpmn_process_dict["tasks"]
     subprocesses = full_bpmn_process_dict["subprocesses"]

     steps_by_id = {step_detail.task_id: step_detail for step_detail in step_details}

-    subprocess_state_overrides = {}
-    for step_detail in step_details:
-        # if step_detail.bpmn_task_identifier == 'Activity_0iajzy6':
-        #     print(f"step_detail: {step_detail}")
-        if step_detail.task_id in tasks:
-            tasks[step_detail.task_id]["state"] = Task.task_state_name_to_int(
-                step_detail.task_state
-            )
-        else:
-            for subprocess_id, subprocess_info in subprocesses.items():
-                if step_detail.task_id in subprocess_info["tasks"]:
-                    subprocess_info["tasks"][step_detail.task_id]["state"] = (
-                        Task.task_state_name_to_int(step_detail.task_state)
-                    )
-                    subprocess_state_overrides[subprocess_id] = TaskState.WAITING
+    def restore_task(spiff_task: dict[str, Any], step_ended: float) -> None:
+        if spiff_task["last_state_change"] > step_ended:
+            spiff_task["state"] = Task.task_state_name_to_int("FUTURE")
+            spiff_task["data"] = {}

-    for subprocess_info in subprocesses.values():
-        for spiff_task_id in subprocess_info["tasks"]:
-            if spiff_task_id not in steps_by_id:
-                subprocess_info["tasks"][spiff_task_id]["data"] = {}
-                subprocess_info["tasks"][spiff_task_id]["state"] = (
-                    subprocess_state_overrides.get(spiff_task_id, TaskState.FUTURE)
-                )
-
-    for spiff_task_id in tasks:
-        if spiff_task_id not in steps_by_id:
-            # if tasks[spiff_task_id]['task_spec'] == 'Activity_0iajzy6':
-            #     print(f"tasks[spiff_task_id]: {tasks[spiff_task_id]}")
-            tasks[spiff_task_id]["data"] = {}
-            state_to_set = TaskState.FUTURE
-            if tasks[spiff_task_id]["state"] == TaskState.LIKELY:
-                # print("WE HERE")
-                previous_completed_steps_for_bpmn_task_identifier = [s for s in step_details if s.bpmn_task_identifier == tasks[spiff_task_id]['task_spec'] and s.task_state == "COMPLETED"]
-                # previous_completed_steps_for_bpmn_task_identifier = [s for s in step_details if s.task_state == "COMPLETED"]
-                # print(f"previous_completed_steps_for_bpmn_task_identifier: {previous_completed_steps_for_bpmn_task_identifier}")
-                if len(previous_completed_steps_for_bpmn_task_identifier) > 0:
-                    state_to_set = TaskState.COMPLETED
-            tasks[spiff_task_id]["state"] = subprocess_state_overrides.get(
-                spiff_task_id, state_to_set
-            )
+    if spiff_step > 0:
+        last_change = step_details[-1].end_in_seconds or 0
+        for spiff_task in tasks.values():
+            restore_task(spiff_task, last_change)
+        for spiff_task_id, subprocess in subprocesses.items():
+            for spiff_task in subprocess["tasks"].values():
+                restore_task(spiff_task, last_change)

     bpmn_process_instance = ProcessInstanceProcessor._serializer.workflow_from_dict(
         full_bpmn_process_dict
     )
-
-    last_step_detail_bpmn_task_identifier = step_details[-1].task_id
-    print(f"last_step_detail_bpmn_task_identifier: {last_step_detail_bpmn_task_identifier}")
-    uuid = UUID(last_step_detail_bpmn_task_identifier)
-    spiff_task = processor.bpmn_process_instance.get_task(uuid)
-    print(f"spiff_task: {spiff_task}")
-    # # workflow.complete_task_from_id(uuid)
-    # # spiff_task = processor.__class__.get_task_by_bpmn_identifier(
-    # #     last_step_detail_bpmn_task_identifier, bpmn_process_instance
-    # # )
-    if spiff_task is not None: #and spiff_task.state != TaskState.READY:
-        print("HEY WE HERE")
-        print(f"spiff_task: {spiff_task}")
-        spiff_task.complete()
-        print(f"spiff_task2: {spiff_task}")
+    if spiff_step > 0:
+        bpmn_process_instance.complete_task_from_id(UUID(step_details[-1].task_id))
+        for subprocess_id, subprocess in bpmn_process_instance.subprocesses.items():
+            if not subprocess.is_completed():
+                task = bpmn_process_instance.get_task(subprocess_id)
+                task._set_state(TaskState.WAITING)

     spiff_tasks = None
     if all_tasks:
@@ -656,23 +617,24 @@
         subprocesses_by_child_task_ids, task_typename_by_task_id
     )
-    tasks = []
     spiff_tasks_to_process = spiff_tasks
-
     if most_recent_tasks_only:
         spiff_tasks_by_process_id_and_task_name: dict[str, SpiffTask] = {}
+        current_tasks = {}
-        for spiff_task in spiff_tasks:
+        for spiff_task in spiff_tasks_to_process:
             row_id = f"{spiff_task.task_spec._wf_spec.name}:{spiff_task.task_spec.name}"
+            if spiff_task.state in [TaskState.READY, TaskState.WAITING]:
+                current_tasks[row_id] = spiff_task
             if (
                 row_id not in spiff_tasks_by_process_id_and_task_name
-                or spiff_task.last_state_change
-                > spiff_tasks_by_process_id_and_task_name[row_id].last_state_change
+                or spiff_task.state
+                > spiff_tasks_by_process_id_and_task_name[row_id].state
             ):
                 spiff_tasks_by_process_id_and_task_name[row_id] = spiff_task
+        spiff_tasks_by_process_id_and_task_name.update(current_tasks)
         spiff_tasks_to_process = spiff_tasks_by_process_id_and_task_name.values()

+    response = []
     for spiff_task in spiff_tasks_to_process:
         task_spiff_step: Optional[int] = None
         if str(spiff_task.id) in steps_by_id:
@@ -686,9 +648,11 @@
             calling_subprocess_task_id=calling_subprocess_task_id,
             task_spiff_step=task_spiff_step,
         )
-        tasks.append(task)
+        if task.state in ["MAYBE", "LIKELY"]:
+            task.state = "FUTURE"
+        response.append(task)

-    return make_response(jsonify(tasks), 200)
+    return make_response(jsonify(response), 200)
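Taken together, the two patches above add a most_recent_tasks_only query parameter to both task-info endpoints (alongside the existing all_tasks and spiff_step parameters), and the frontend now passes it so that at most one task per "process id : task spec name" pair comes back. A minimal sketch of exercising the flag directly follows; this is illustrative only — the base URL, token, process model identifier, and instance id are placeholders, the endpoint shape is taken from the frontend's task-info calls, and the printed state field is assumed from the Task serialization adjusted in PATCH 003.

import requests

BACKEND = "http://localhost:7000/v1.0"  # placeholder backend URL
TOKEN = "a-valid-bearer-token"  # placeholder; obtain via the normal login flow

# Path shape as used by the frontend: /process-instances/for-me/<modified model id>/<instance id>/task-info
url = (
    f"{BACKEND}/process-instances/for-me/example:model/42/task-info"
    "?all_tasks=true&most_recent_tasks_only=true"
)
tasks = requests.get(
    url, headers={"Authorization": f"Bearer {TOKEN}"}, timeout=30
).json()
for task in tasks:
    # With most_recent_tasks_only=true there should be at most one entry per
    # process/task-spec pair, and MAYBE/LIKELY states come back as FUTURE.
    print(task["state"])
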
From fe4694729c34fb48934901453fec1ccd75380f50 Mon Sep 17 00:00:00 2001
From: jasquat
Date: Tue, 14 Mar 2023 10:51:12 -0400
Subject: [PATCH 004/162] do not write spiff step details to see how that
 changes performance

---
 .../services/process_instance_processor.py | 22 ++++++++++---------
 1 file changed, 12 insertions(+), 10 deletions(-)

diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
index 0b2d73b3..b2ce4cfd 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
@@ -1661,17 +1661,19 @@ class ProcessInstanceProcessor:
     ) -> None:
         """Do_engine_steps."""
-        def spiff_step_details_mapping_builder(
-            task: SpiffTask, start: float, end: float
-        ) -> dict:
-            self._script_engine.environment.revise_state_with_task_data(task)
-            return self.spiff_step_details_mapping(task, start, end)
-
-        step_delegate = StepDetailLoggingDelegate(
-            self.increment_spiff_step, spiff_step_details_mapping_builder
-        )
+        # NOTE: Commenting out to test how this changes performance:
+        # def spiff_step_details_mapping_builder(
+        #     task: SpiffTask, start: float, end: float
+        # ) -> dict:
+        #     self._script_engine.environment.revise_state_with_task_data(task)
+        #     return self.spiff_step_details_mapping(task, start, end)
+        #
+        # step_delegate = StepDetailLoggingDelegate(
+        #     self.increment_spiff_step, spiff_step_details_mapping_builder
+        # )
         task_model_delegate = TaskModelSavingDelegate(
-            secondary_engine_step_delegate=step_delegate,
+            # secondary_engine_step_delegate=step_delegate,
+            secondary_engine_step_delegate=None,
             serializer=self._serializer,
             process_instance=self.process_instance_model,
         )

From b8e0a8f665a97466cfa84ae6423405acad65537d Mon Sep 17 00:00:00 2001
From: jbirddog <100367399+jbirddog@users.noreply.github.com>
Date: Tue, 14 Mar 2023 13:12:01 -0400
Subject: [PATCH 005/162] Move process instance locking to new queue table
 (#177)

---
 .../migrations/versions/e2972eaf8469_.py      |  58 +++++++++
 .../src/spiffworkflow_backend/__init__.py     |  31 ++++-
 .../spiffworkflow_backend/config/default.py   |  14 +++
 .../load_database_models.py                   |   3 +
 .../models/process_instance.py                |   3 -
 .../models/process_instance_queue.py          |  30 +++++
 .../routes/process_instances_controller.py    |  10 ++
 .../services/background_processing_service.py |  6 +
 .../services/process_instance_lock_service.py |  67 +++++++++++
 .../services/process_instance_processor.py    |  85 ++++----------
 .../process_instance_queue_service.py         | 110 ++++++++++++++++++
 .../services/process_instance_service.py      |  22 +++-
 .../services/workflow_execution_service.py    |  13 ++-
 .../helpers/base_test.py                      |   6 +
 .../unit/test_process_instance_processor.py   |  16 ++-
 15 files changed, 389 insertions(+), 85 deletions(-)
 create mode 100644 spiffworkflow-backend/migrations/versions/e2972eaf8469_.py
 create mode 100644 spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_queue.py
 create mode 100644 spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_lock_service.py
 create mode 100644 spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_queue_service.py

diff --git a/spiffworkflow-backend/migrations/versions/e2972eaf8469_.py b/spiffworkflow-backend/migrations/versions/e2972eaf8469_.py
new file mode 100644
index 00000000..f1796bfb
--- /dev/null
+++ b/spiffworkflow-backend/migrations/versions/e2972eaf8469_.py
@@ -0,0 +1,58 @@
+"""empty message
+
+Revision ID: e2972eaf8469
+Revises: 389800c352ee
+Create Date: 2023-03-13 22:00:21.579493
+
+"""
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import mysql
+
+# revision identifiers, used by Alembic.
+revision = 'e2972eaf8469'
+down_revision = '389800c352ee'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('process_instance_queue',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('process_instance_id', sa.Integer(), nullable=False),
+    sa.Column('run_at_in_seconds', sa.Integer(), nullable=True),
+    sa.Column('priority', sa.Integer(), nullable=True),
+    sa.Column('locked_by', sa.String(length=80), nullable=True),
+    sa.Column('locked_at_in_seconds', sa.Integer(), nullable=True),
+    sa.Column('status', sa.String(length=50), nullable=True),
+    sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
+    sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
+    sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
+    sa.PrimaryKeyConstraint('id')
+    )
+    op.create_index(op.f('ix_process_instance_queue_locked_at_in_seconds'), 'process_instance_queue', ['locked_at_in_seconds'], unique=False)
+    op.create_index(op.f('ix_process_instance_queue_locked_by'), 'process_instance_queue', ['locked_by'], unique=False)
+    op.create_index(op.f('ix_process_instance_queue_process_instance_id'), 'process_instance_queue', ['process_instance_id'], unique=True)
+    op.create_index(op.f('ix_process_instance_queue_status'), 'process_instance_queue', ['status'], unique=False)
+    op.alter_column('message_instance', 'user_id',
+               existing_type=mysql.INTEGER(),
+               nullable=True)
+    op.drop_column('process_instance', 'locked_by')
+    op.drop_column('process_instance', 'locked_at_in_seconds')
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column('process_instance', sa.Column('locked_at_in_seconds', mysql.INTEGER(), autoincrement=False, nullable=True))
+    op.add_column('process_instance', sa.Column('locked_by', mysql.VARCHAR(length=80), nullable=True))
+    op.alter_column('message_instance', 'user_id',
+               existing_type=mysql.INTEGER(),
+               nullable=False)
+    op.drop_index(op.f('ix_process_instance_queue_status'), table_name='process_instance_queue')
+    op.drop_index(op.f('ix_process_instance_queue_process_instance_id'), table_name='process_instance_queue')
+    op.drop_index(op.f('ix_process_instance_queue_locked_by'), table_name='process_instance_queue')
+    op.drop_index(op.f('ix_process_instance_queue_locked_at_in_seconds'), table_name='process_instance_queue')
+    op.drop_table('process_instance_queue')
+    # ### end Alembic commands ###
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py b/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py
index 3266ae76..d7041ecb 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py
@@ -68,6 +68,15 @@ def start_scheduler(
 ) -> None:
     """Start_scheduler."""
     scheduler = scheduler_class()
+
+    # TODO: polling intervals for different jobs
+    polling_interval_in_seconds = app.config[
+        "SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_POLLING_INTERVAL_IN_SECONDS"
+    ]
+    # TODO: add job to release locks to simplify other queries
+    # TODO: add job to delete completed entries
+    # TODO: add job to run old/low priority instances so they do not get drowned out
+
     scheduler.add_job(
         BackgroundProcessingService(app).process_message_instances_with_app_context,
         "interval",
@@ -76,7 +85,7 @@
     scheduler.add_job(
         BackgroundProcessingService(app).process_waiting_process_instances,
         "interval",
-        seconds=10,
+        seconds=polling_interval_in_seconds,
     )
     scheduler.add_job(
         BackgroundProcessingService(app).process_user_input_required_process_instances,
@@ -86,6 +95,20 @@ def start_scheduler(
     scheduler.start()


+def should_start_scheduler(app: flask.app.Flask) -> bool:
+    if not app.config["SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER"]:
+        return False
+
+    # do not start the scheduler twice in flask debug mode but support code reloading
+    if (
+        app.config["ENV_IDENTIFIER"] != "local_development"
+        or os.environ.get("WERKZEUG_RUN_MAIN") != "true"
+    ):
+        return False
+
+    return True
+
+
 class NoOpCipher:
     def encrypt(self, value: str) -> bytes:
         return str.encode(value)
@@ -134,11 +157,7 @@ def create_app() -> flask.app.Flask:

     app.json = MyJSONEncoder(app)

-    # do not start the scheduler twice in flask debug mode
-    if (
-        app.config["SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER"]
-        and os.environ.get("WERKZEUG_RUN_MAIN") != "true"
-    ):
+    if should_start_scheduler(app):
         start_scheduler(app)

     configure_sentry(app)
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py
index 04136d36..61a89f97 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py
@@ -21,6 +21,12 @@ SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER = (
     environ.get("SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER", default="false")
     == "true"
 )
+SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_POLLING_INTERVAL_IN_SECONDS = int(
+    environ.get(
+        "SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_POLLING_INTERVAL_IN_SECONDS",
+        default="10",
+    )
+)
 SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND = environ.get(
     "SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND", default="http://localhost:7001"
 )
@@ -147,6 +153,14 @@ SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP = environ.get(
     "SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP", default="everybody"
 )

+SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_BACKGROUND = environ.get(
+    "SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_BACKGROUND", default="greedy"
+)
+
+SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_WEB = environ.get(
+    "SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_WEB", default="greedy"
+)
+
 # this is only used in CI. use SPIFFWORKFLOW_BACKEND_DATABASE_URI instead for real configuration
 SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD = environ.get(
     "SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD", default=None
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/load_database_models.py b/spiffworkflow-backend/src/spiffworkflow_backend/load_database_models.py
index 376083cf..4b547158 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/load_database_models.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/load_database_models.py
@@ -66,5 +66,8 @@ from spiffworkflow_backend.models.json_data import JsonDataModel  # noqa: F401
 from spiffworkflow_backend.models.bpmn_process_definition_relationship import (
     BpmnProcessDefinitionRelationshipModel,
 )  # noqa: F401
+from spiffworkflow_backend.models.process_instance_queue import (
+    ProcessInstanceQueueModel,
+)  # noqa: F401

 add_listeners()
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py
index cbbceaba..f155494a 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py
@@ -105,9 +105,6 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
     bpmn_version_control_identifier: str = db.Column(db.String(255))
     spiff_step: int = db.Column(db.Integer)

-    locked_by: str | None = db.Column(db.String(80))
-    locked_at_in_seconds: int | None = db.Column(db.Integer)
-
     bpmn_xml_file_contents: str | None = None
     process_model_with_diagram_identifier: str | None = None
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_queue.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_queue.py
new file mode 100644
index 00000000..ff81cf86
--- /dev/null
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_queue.py
@@ -0,0 +1,30 @@
+"""Process_instance_queue."""
+from dataclasses import dataclass
+from typing import Union
+
+from sqlalchemy import ForeignKey
+
+from spiffworkflow_backend.models.db import db
+from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
+from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
+
+
+@dataclass
+class ProcessInstanceQueueModel(SpiffworkflowBaseDBModel):
+    """ProcessInstanceQueueModel."""
+
+    __tablename__ = "process_instance_queue"
+
+    id: int = db.Column(db.Integer, primary_key=True)
+    process_instance_id: int = db.Column(
+        ForeignKey(ProcessInstanceModel.id), index=True, unique=True, nullable=False  # type: ignore
+    )
+    run_at_in_seconds: int = db.Column(db.Integer)
+    priority: int = db.Column(db.Integer)
+    locked_by: Union[str, None] = db.Column(db.String(80), index=True, nullable=True)
+    locked_at_in_seconds: Union[int, None] = db.Column(
+        db.Integer, index=True, nullable=True
+    )
+    status: str = db.Column(db.String(50), index=True)
+    updated_at_in_seconds: int = db.Column(db.Integer)
+    created_at_in_seconds: int = db.Column(db.Integer)
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py
index f6c9ff66..252b9264 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py
@@ -30,6 +30,9 @@ from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSc
 from spiffworkflow_backend.models.process_instance_metadata import (
     ProcessInstanceMetadataModel,
 )
+from spiffworkflow_backend.models.process_instance_queue import (
+    ProcessInstanceQueueModel,
+)
 from spiffworkflow_backend.models.process_instance_report import (
     ProcessInstanceReportModel,
 )
@@ -55,6 +58,9 @@ from spiffworkflow_backend.services.message_service import MessageService
 from spiffworkflow_backend.services.process_instance_processor import (
     ProcessInstanceProcessor,
 )
+from spiffworkflow_backend.services.process_instance_queue_service import (
+    ProcessInstanceQueueService,
+)
 from spiffworkflow_backend.services.process_instance_report_service import (
     ProcessInstanceReportFilter,
 )
@@ -92,6 +98,7 @@ def process_instance_create(
             process_model_identifier, g.user
         )
     )
+    ProcessInstanceQueueService.enqueue(process_instance)
     return Response(
         json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
         status=201,
@@ -413,6 +420,9 @@ def process_instance_delete(
     db.session.query(SpiffStepDetailsModel).filter_by(
         process_instance_id=process_instance.id
     ).delete()
+    db.session.query(ProcessInstanceQueueModel).filter_by(
+        process_instance_id=process_instance.id
+    ).delete()
     db.session.delete(process_instance)
     db.session.commit()
     return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/background_processing_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/background_processing_service.py
index dc7e1e7e..3ce0e8f2 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/services/background_processing_service.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/background_processing_service.py
@@ -3,6 +3,9 @@ import flask

 from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
 from spiffworkflow_backend.services.message_service import MessageService
+from spiffworkflow_backend.services.process_instance_lock_service import (
+    ProcessInstanceLockService,
+)
 from spiffworkflow_backend.services.process_instance_service import (
     ProcessInstanceService,
 )
@@ -18,11 +21,13 @@ class BackgroundProcessingService:
     def process_waiting_process_instances(self) -> None:
         """Since this runs in a scheduler, we need to specify the app context as well."""
         with self.app.app_context():
+            ProcessInstanceLockService.set_thread_local_locking_context("bg:waiting")
             ProcessInstanceService.do_waiting()

     def process_user_input_required_process_instances(self) -> None:
         """Since this runs in a scheduler, we need to specify the app context as well."""
         with self.app.app_context():
+            ProcessInstanceLockService.set_thread_local_locking_context("bg:userinput")
             ProcessInstanceService.do_waiting(
                 ProcessInstanceStatus.user_input_required.value
             )
@@ -30,4 +35,5 @@ class BackgroundProcessingService:
     def process_message_instances_with_app_context(self) -> None:
         """Since this runs in a scheduler, we need to specify the app context as well."""
         with self.app.app_context():
+            ProcessInstanceLockService.set_thread_local_locking_context("bg:messages")
             MessageService.correlate_all_message_instances()
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_lock_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_lock_service.py
new file mode 100644
index 00000000..5c3cd935
--- /dev/null
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_lock_service.py
@@ -0,0 +1,67 @@
+import threading
+from typing import Any
+from typing import List
+from typing import Optional
+
+from flask import current_app
+
+from spiffworkflow_backend.models.process_instance_queue import (
+    ProcessInstanceQueueModel,
+)
+
+
+class ProcessInstanceLockService:
+    """TODO: comment."""
+
+    @classmethod
+    def set_thread_local_locking_context(cls, domain: str) -> None:
+        current_app.config["THREAD_LOCAL_DATA"].lock_service_context = {
+            "domain": domain,
+            "uuid": current_app.config["PROCESS_UUID"],
+            "thread_id": threading.get_ident(),
+            "locks": {},
+        }
+
+    @classmethod
+    def get_thread_local_locking_context(cls) -> dict[str, Any]:
+        tld = current_app.config["THREAD_LOCAL_DATA"]
+        if not hasattr(tld, "lock_service_context"):
+            cls.set_thread_local_locking_context("web")
+        return tld.lock_service_context  # type: ignore
+
+    @classmethod
+    def locked_by(cls) -> str:
+        ctx = cls.get_thread_local_locking_context()
+        return f"{ctx['domain']}:{ctx['uuid']}:{ctx['thread_id']}"
+
+    @classmethod
+    def lock(
+        cls, process_instance_id: int, queue_entry: ProcessInstanceQueueModel
+    ) -> None:
+        ctx = cls.get_thread_local_locking_context()
+        ctx["locks"][process_instance_id] = queue_entry
+
+    @classmethod
+    def lock_many(cls, queue_entries: List[ProcessInstanceQueueModel]) -> List[int]:
+        ctx = cls.get_thread_local_locking_context()
+        new_locks = {entry.process_instance_id: entry for entry in queue_entries}
+        new_lock_ids = list(new_locks.keys())
+        ctx["locks"].update(new_locks)
+        return new_lock_ids
+
+    @classmethod
+    def unlock(cls, process_instance_id: int) -> ProcessInstanceQueueModel:
+        ctx = cls.get_thread_local_locking_context()
+        return ctx["locks"].pop(process_instance_id)  # type: ignore
+
+    @classmethod
+    def try_unlock(
+        cls, process_instance_id: int
+    ) -> Optional[ProcessInstanceQueueModel]:
+        ctx = cls.get_thread_local_locking_context()
+        return ctx["locks"].pop(process_instance_id, None)  # type: ignore
+
+    @classmethod
+    def has_lock(cls, process_instance_id: int) -> bool:
+        ctx = cls.get_thread_local_locking_context()
+        return process_instance_id in ctx["locks"]
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
index b2ce4cfd..f78a3fd4 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
@@ -51,7 +51,6 @@ from SpiffWorkflow.spiff.serializer.config import SPIFF_SPEC_CONFIG  # type: ign
 from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
 from SpiffWorkflow.task import TaskState
 from SpiffWorkflow.util.deep_merge import DeepMerge  # type: ignore
-from sqlalchemy import text

 from spiffworkflow_backend.exceptions.api_error import ApiError
 from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel
@@ -89,6 +88,12 @@ from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.scripts.script import Script
 from spiffworkflow_backend.services.custom_parser import MyCustomParser
 from spiffworkflow_backend.services.file_system_service import FileSystemService
+from spiffworkflow_backend.services.process_instance_lock_service import (
+    ProcessInstanceLockService,
+)
+from spiffworkflow_backend.services.process_instance_queue_service import (
+    ProcessInstanceQueueService,
+)
 from spiffworkflow_backend.services.process_model_service import ProcessModelService
 from spiffworkflow_backend.services.service_task_service import ServiceTaskDelegate
 from spiffworkflow_backend.services.spec_file_service import SpecFileService
@@ -143,14 +148,6 @@ class MissingProcessInfoError(Exception):
     """MissingProcessInfoError."""


-class ProcessInstanceIsAlreadyLockedError(Exception):
-    pass
-
-
-class ProcessInstanceLockedBySomethingElseError(Exception):
-    pass
-
-
 class SpiffStepDetailIsMissingError(Exception):
     pass
@@ -1253,6 +1250,8 @@
             self.bpmn_process_instance.catch(event_definition)
         except Exception as e:
             print(e)
+
+        # TODO: do_engine_steps without a lock
         self.do_engine_steps(save=True)

     def add_step(self, step: Union[dict, None] = None) -> None:
@@ -1543,55 +1542,13 @@
         # current_app.logger.debug(f"the_status: {the_status} for instance {self.process_instance_model.id}")
         return the_status

-    # inspiration from https://github.com/collectiveidea/delayed_job_active_record/blob/master/lib/delayed/backend/active_record.rb
-    # could consider borrowing their "cleanup all my locks when the app quits" idea as well and
-    # implement via https://docs.python.org/3/library/atexit.html
+    # TODO: replace with implicit/more granular locking in workflow execution service
     def lock_process_instance(self, lock_prefix: str) -> None:
-        current_app.config["THREAD_LOCAL_DATA"].locked_by_prefix = lock_prefix
-        locked_by = f"{lock_prefix}_{current_app.config['PROCESS_UUID']}"
-        current_time_in_seconds = round(time.time())
-        lock_expiry_in_seconds = (
-            current_time_in_seconds
-            - current_app.config[
-                "SPIFFWORKFLOW_BACKEND_ALLOW_CONFISCATING_LOCK_AFTER_SECONDS"
-            ]
-        )
-
-        query_text = text(
-            "UPDATE process_instance SET locked_at_in_seconds ="
-            " :current_time_in_seconds, locked_by = :locked_by where id = :id AND"
-            " (locked_by IS NULL OR locked_at_in_seconds < :lock_expiry_in_seconds);"
-        ).execution_options(autocommit=True)
-        result = db.engine.execute(
-            query_text,
-            id=self.process_instance_model.id,
-            current_time_in_seconds=current_time_in_seconds,
-            locked_by=locked_by,
-            lock_expiry_in_seconds=lock_expiry_in_seconds,
-        )
-        # it seems like autocommit is working above (we see the statement in debug logs) but sqlalchemy doesn't
-        # seem to update properly so tell it to commit as well.
-        # if we omit this line then querying the record from a unit test doesn't ever show the record as locked.
-        db.session.commit()
-        if result.rowcount < 1:
-            raise ProcessInstanceIsAlreadyLockedError(
-                f"Cannot lock process instance {self.process_instance_model.id}. "
-                "It has already been locked."
-            )
+        ProcessInstanceQueueService.dequeue(self.process_instance_model)

+    # TODO: replace with implicit/more granular locking in workflow execution service
     def unlock_process_instance(self, lock_prefix: str) -> None:
-        current_app.config["THREAD_LOCAL_DATA"].locked_by_prefix = None
-        locked_by = f"{lock_prefix}_{current_app.config['PROCESS_UUID']}"
-        if self.process_instance_model.locked_by != locked_by:
-            raise ProcessInstanceLockedBySomethingElseError(
-                f"Cannot unlock process instance {self.process_instance_model.id}."
-                f"It locked by {self.process_instance_model.locked_by}"
-            )
-
-        self.process_instance_model.locked_by = None
-        self.process_instance_model.locked_at_in_seconds = None
-        db.session.add(self.process_instance_model)
-        db.session.commit()
+        ProcessInstanceQueueService.enqueue(self.process_instance_model)

     def process_bpmn_messages(self) -> None:
         """Process_bpmn_messages."""
@@ -1657,7 +1614,7 @@
         self,
         exit_at: None = None,
         save: bool = False,
-        execution_strategy_name: str = "greedy",
+        execution_strategy_name: Optional[str] = None,
     ) -> None:
         """Do_engine_steps."""
@@ -1677,6 +1634,12 @@
             serializer=self._serializer,
             process_instance=self.process_instance_model,
         )
+
+        if execution_strategy_name is None:
+            execution_strategy_name = current_app.config[
+                "SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_WEB"
+            ]
+
         execution_strategy = execution_strategy_named(
             execution_strategy_name, task_model_delegate
         )
@@ -1692,12 +1655,9 @@
     # log the spiff step details so we know what is processing the process
    # instance when a human task has a timer event.
    def log_spiff_step_details(self, step_details: Any) -> None:
-        tld = current_app.config["THREAD_LOCAL_DATA"]
-        if hasattr(tld, "locked_by_prefix") and len(step_details) > 0:
-            locked_by_prefix = tld.locked_by_prefix
-            message = (
-                f"ADDING SPIFF BULK STEP DETAILS: {locked_by_prefix}: {step_details}"
-            )
+        if ProcessInstanceLockService.has_lock(self.process_instance_model.id):
+            locked_by = ProcessInstanceLockService.locked_by()
+            message = f"ADDING SPIFF BULK STEP DETAILS: {locked_by}: {step_details}"
             current_app.logger.debug(message)

     def cancel_notify(self) -> None:
@@ -1712,6 +1672,7 @@
             bpmn_process_instance.signal("cancel")  # generate a cancel signal.
             bpmn_process_instance.catch(CancelEventDefinition())
             # Due to this being static, can't save granular step details in this case
+            # TODO: do_engine_steps without a lock
             bpmn_process_instance.do_engine_steps()
         except WorkflowTaskException as we:
             raise ApiError.from_workflow_exception("task_error", str(we), we) from we
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_queue_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_queue_service.py
new file mode 100644
index 00000000..d9f900b2
--- /dev/null
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_queue_service.py
@@ -0,0 +1,110 @@
+import time
+from typing import List
+
+from flask import current_app
+
+from spiffworkflow_backend.models.db import db
+from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
+from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
+from spiffworkflow_backend.models.process_instance_queue import (
+    ProcessInstanceQueueModel,
+)
+from spiffworkflow_backend.services.process_instance_lock_service import (
+    ProcessInstanceLockService,
+)
+
+
+class ProcessInstanceIsAlreadyLockedError(Exception):
+    pass
+
+
+class ProcessInstanceQueueService:
+    """TODO: comment."""
+
+    @staticmethod
+    def enqueue(process_instance: ProcessInstanceModel) -> None:
+        queue_item = ProcessInstanceLockService.try_unlock(process_instance.id)
+
+        if queue_item is None:
+            queue_item = ProcessInstanceQueueModel(
+                process_instance_id=process_instance.id
+            )
+
+        # TODO: configurable params (priority/run_at)
+        queue_item.run_at_in_seconds = round(time.time())
+        queue_item.priority = 2
+        queue_item.status = process_instance.status
+        queue_item.locked_by = None
+        queue_item.locked_at_in_seconds = None
+
+        db.session.add(queue_item)
+        db.session.commit()
+
+    @staticmethod
+    def dequeue(process_instance: ProcessInstanceModel) -> None:
+        if ProcessInstanceLockService.has_lock(process_instance.id):
+            return
+
+        locked_by = ProcessInstanceLockService.locked_by()
+
+        db.session.query(ProcessInstanceQueueModel).filter(
+            ProcessInstanceQueueModel.process_instance_id == process_instance.id,
+            ProcessInstanceQueueModel.locked_by.is_(None),  # type: ignore
+        ).update(
+            {
+                "locked_by": locked_by,
+            }
+        )
+
+        db.session.commit()
+
+        queue_entry = (
+            db.session.query(ProcessInstanceQueueModel)
+            .filter(
+                ProcessInstanceQueueModel.process_instance_id == process_instance.id,
+                ProcessInstanceQueueModel.locked_by == locked_by,
+            )
+            .first()
+        )
+
+        if queue_entry is None:
+            raise ProcessInstanceIsAlreadyLockedError(
+                f"Cannot lock process instance {process_instance.id}. "
+                "It has already been locked or has not been enqueued."
+            )
+
+        ProcessInstanceLockService.lock(process_instance.id, queue_entry)
+
+    @staticmethod
+    def dequeue_many(
+        status_value: str = ProcessInstanceStatus.waiting.value,
+    ) -> List[int]:
+        locked_by = ProcessInstanceLockService.locked_by()
+
+        # TODO: configurable params (priority/run_at/limit)
+        db.session.query(ProcessInstanceQueueModel).filter(
+            ProcessInstanceQueueModel.status == status_value,
+            ProcessInstanceQueueModel.locked_by.is_(None),  # type: ignore
+        ).update(
+            {
+                "locked_by": locked_by,
+            }
+        )
+
+        db.session.commit()
+
+        queue_entries = (
+            db.session.query(ProcessInstanceQueueModel)
+            .filter(
+                ProcessInstanceQueueModel.status == status_value,
+                ProcessInstanceQueueModel.locked_by == locked_by,
+            )
+            .all()
+        )
+
+        locked_ids = ProcessInstanceLockService.lock_many(queue_entries)
+
+        if len(locked_ids) > 0:
+            current_app.logger.info(f"{locked_by} dequeued_many: {locked_ids}")
+
+        return locked_ids
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py
index b3959ea8..dfeb2bde 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py
@@ -29,10 +29,13 @@ from spiffworkflow_backend.services.authorization_service import AuthorizationSe
 from spiffworkflow_backend.services.git_service import GitCommandError
 from spiffworkflow_backend.services.git_service import GitService
 from spiffworkflow_backend.services.process_instance_processor import (
+    ProcessInstanceProcessor,
+)
+from spiffworkflow_backend.services.process_instance_queue_service import (
     ProcessInstanceIsAlreadyLockedError,
 )
-from spiffworkflow_backend.services.process_instance_processor import (
-    ProcessInstanceProcessor,
+from spiffworkflow_backend.services.process_instance_queue_service import (
+    ProcessInstanceQueueService,
 )
 from spiffworkflow_backend.services.process_model_service import ProcessModelService
@@ -81,9 +84,15 @@ class ProcessInstanceService:
     @staticmethod
     def do_waiting(status_value: str = ProcessInstanceStatus.waiting.value) -> None:
         """Do_waiting."""
+        locked_process_instance_ids = ProcessInstanceQueueService.dequeue_many(
+            status_value
+        )
+        if len(locked_process_instance_ids) == 0:
+            return
+
         records = (
             db.session.query(ProcessInstanceModel)
-            .filter(ProcessInstanceModel.status == status_value)
+            .filter(ProcessInstanceModel.id.in_(locked_process_instance_ids))  # type: ignore
             .all()
         )
         process_instance_lock_prefix = "Background"
@@ -97,7 +106,12 @@
                 processor = ProcessInstanceProcessor(process_instance)
                 processor.lock_process_instance(process_instance_lock_prefix)
                 locked = True
-                processor.do_engine_steps(save=True)
+                execution_strategy_name = current_app.config[
+                    "SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_BACKGROUND"
+                ]
+                processor.do_engine_steps(
+                    save=True, execution_strategy_name=execution_strategy_name
+                )
             except ProcessInstanceIsAlreadyLockedError:
                 continue
             except Exception as e:
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py
index 864885e5..1ab22ee4 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py
@@ -4,6 +4,7 @@ from typing import Callable
 from typing import List
 from typing import Optional

+from flask import current_app
 from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer  # type: ignore
 from SpiffWorkflow.bpmn.workflow import BpmnWorkflow  # type: ignore
 from SpiffWorkflow.exceptions import SpiffWorkflowException  # type: ignore
@@ -19,6 +20,9 @@ from spiffworkflow_backend.models.message_instance_correlation import (
 from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
 from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
 from spiffworkflow_backend.models.task import TaskModel  # noqa: F401
+from spiffworkflow_backend.services.process_instance_lock_service import (
+    ProcessInstanceLockService,
+)
 from spiffworkflow_backend.services.task_service import JsonDataDict
 from spiffworkflow_backend.services.task_service import TaskService
@@ -202,7 +206,7 @@ class ExecutionStrategy:


 class GreedyExecutionStrategy(ExecutionStrategy):
-    """The common execution strategy. This will greedily run all engine step without stopping."""
+    """The common execution strategy. This will greedily run all engine steps without stopping."""

     def do_engine_steps(
         self, bpmn_process_instance: BpmnWorkflow, exit_at: None = None
@@ -286,9 +290,16 @@ class WorkflowExecutionService:

     def do_engine_steps(self, exit_at: None = None, save: bool = False) -> None:
         """Do_engine_steps."""
+        if not ProcessInstanceLockService.has_lock(self.process_instance_model.id):
+            # TODO: can't be an exception yet - believe there are flows that are not locked.
+            current_app.logger.error(
+                "The current thread has not obtained a lock for this process instance.",
+            )
+
         try:
             self.bpmn_process_instance.refresh_waiting_tasks()

+            # TODO: implicit re-entrant locks here `with_dequeued`
             self.execution_strategy.do_engine_steps(self.bpmn_process_instance, exit_at)

             if self.bpmn_process_instance.is_completed():
diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/base_test.py b/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/base_test.py
index 3b1c3344..704d7379 100644
--- a/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/base_test.py
+++ b/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/base_test.py
@@ -25,6 +25,9 @@ from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema
 from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.services.authorization_service import AuthorizationService
 from spiffworkflow_backend.services.file_system_service import FileSystemService
+from spiffworkflow_backend.services.process_instance_queue_service import (
+    ProcessInstanceQueueService,
+)
 from spiffworkflow_backend.services.process_model_service import ProcessModelService
 from spiffworkflow_backend.services.user_service import UserService
@@ -308,6 +311,9 @@ class BaseTest:
         )
         db.session.add(process_instance)
         db.session.commit()
+
+        ProcessInstanceQueueService.enqueue(process_instance)
+
         return process_instance

     @classmethod
diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py
index e1618f61..3452dcf1 100644
--- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py
+++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py
@@ -18,15 +18,12 @@ from spiffworkflow_backend.services.authorization_service import AuthorizationSe
 from spiffworkflow_backend.services.authorization_service import (
     UserDoesNotHaveAccessToTaskError,
 )
-from spiffworkflow_backend.services.process_instance_processor import (
-    ProcessInstanceIsAlreadyLockedError,
-)
-from spiffworkflow_backend.services.process_instance_processor import (
-    ProcessInstanceLockedBySomethingElseError,
-)
 from spiffworkflow_backend.services.process_instance_processor import (
     ProcessInstanceProcessor,
 )
+from spiffworkflow_backend.services.process_instance_queue_service import (
+    ProcessInstanceIsAlreadyLockedError,
+)
 from spiffworkflow_backend.services.process_instance_service import (
     ProcessInstanceService,
 )
@@ -436,7 +433,8 @@ class TestProcessInstanceProcessor(BaseTest):
         assert len(process_instance.active_human_tasks) == 1
         assert initial_human_task_id == process_instance.active_human_tasks[0].id

-    def test_it_can_lock_and_unlock_a_process_instance(
+    # TODO: port this test to queue_service test
+    def xxx_test_it_can_lock_and_unlock_a_process_instance(
         self,
         app: Flask,
         client: FlaskClient,
@@ -465,8 +463,8 @@
         with pytest.raises(ProcessInstanceIsAlreadyLockedError):
             processor.lock_process_instance("TEST")

-        with pytest.raises(ProcessInstanceLockedBySomethingElseError):
-            processor.unlock_process_instance("TEST2")
+        # with pytest.raises(ProcessInstanceLockedBySomethingElseError):
+        #     processor.unlock_process_instance("TEST2")

         processor.unlock_process_instance("TEST")
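The queueing and locking pieces introduced in the patch above are intended to be used together in roughly the following shape. This is a minimal sketch under stated assumptions, not code from the repository: example_worker and the "bg:example" domain are invented for illustration, and the function is assumed to run inside a Flask app context, the way BackgroundProcessingService does.

from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.services.process_instance_lock_service import (
    ProcessInstanceLockService,
)
from spiffworkflow_backend.services.process_instance_queue_service import (
    ProcessInstanceIsAlreadyLockedError,
    ProcessInstanceQueueService,
)


def example_worker(process_instance: ProcessInstanceModel) -> None:
    # Tag this thread so locked_by() yields a stable "domain:uuid:thread_id"
    # identity for any queue rows it claims (hypothetical domain name).
    ProcessInstanceLockService.set_thread_local_locking_context("bg:example")
    try:
        # dequeue() claims the queue row by writing locked_by and records the
        # lock in thread-local state; it raises if another worker holds it or
        # the instance was never enqueued.
        ProcessInstanceQueueService.dequeue(process_instance)
    except ProcessInstanceIsAlreadyLockedError:
        return
    try:
        pass  # run engine steps or other work while holding the lock
    finally:
        # enqueue() pops the thread-local lock and clears locked_by, making
        # the instance claimable again.
        ProcessInstanceQueueService.enqueue(process_instance)

This mirrors what lock_process_instance and unlock_process_instance now delegate to, and dequeue_many plays the same role for the background scheduler, claiming every unlocked queue row in a given status with a single update.
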
useState(false); @@ -33,6 +33,8 @@ export default function TaskShow() { useEffect(() => { const processResult = (result: ProcessInstanceTask) => { setTask(result); + setDisabled(false); + /* Disable call to load previous tasks -- do not display menu. const url = `/v1.0/process-instances/for-me/${modifyProcessIdentifierForPathParam( result.process_model_identifier )}/${params.process_instance_id}/task-info`; @@ -52,6 +54,7 @@ export default function TaskShow() { addError(error); }, }); + */ }; HttpService.makeCallToBackend({ path: `/tasks/${params.process_instance_id}/${params.task_id}`, From fdef1154ffe6eed9b1629094240c71aa1b5807e2 Mon Sep 17 00:00:00 2001 From: Dan Date: Tue, 14 Mar 2023 14:02:15 -0400 Subject: [PATCH 007/162] remove any filterable columns when doing a "clear" or "reset" on the filter form. Also clear out the process initiator. --- .../src/components/ProcessInstanceListTable.tsx | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx index 4b0498b7..9f8d258c 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -702,6 +702,14 @@ export default function ProcessInstanceListTable({ setEndFromTime(''); setEndToDate(''); setEndToTime(''); + setProcessInitiatorSelection(null); + setProcessInitiatorText(''); + + if (reportMetadata) { + reportMetadata.columns = reportMetadata.columns.filter( + (column) => !column.filterable + ); + } }; const processInstanceReportDidChange = (selection: any, mode?: string) => { From 4261874bba8b44258f3346034ae73b826181bd92 Mon Sep 17 00:00:00 2001 From: burnettk Date: Tue, 14 Mar 2023 15:08:10 -0400 Subject: [PATCH 008/162] add list with just one user for bootstrapping --- spiffworkflow-backend/keycloak/test_user_lists/admin | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 spiffworkflow-backend/keycloak/test_user_lists/admin diff --git a/spiffworkflow-backend/keycloak/test_user_lists/admin b/spiffworkflow-backend/keycloak/test_user_lists/admin new file mode 100644 index 00000000..aa676cd9 --- /dev/null +++ b/spiffworkflow-backend/keycloak/test_user_lists/admin @@ -0,0 +1,2 @@ +email,spiffworkflow-employeeid +admin@spiffworkflow.org From 8e7993b0ef61940cd2e3719ed74268de9d28ec20 Mon Sep 17 00:00:00 2001 From: burnettk Date: Tue, 14 Mar 2023 17:54:20 -0400 Subject: [PATCH 009/162] put back spiff step details for today --- .../services/process_instance_processor.py | 28 ++++++++++--------- 1 file changed, 15 insertions(+), 13 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 54432f67..dd2b405a 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -102,6 +102,9 @@ from spiffworkflow_backend.services.user_service import UserService from spiffworkflow_backend.services.workflow_execution_service import ( execution_strategy_named, ) +from spiffworkflow_backend.services.workflow_execution_service import ( + StepDetailLoggingDelegate, +) from spiffworkflow_backend.services.workflow_execution_service import ( TaskModelSavingDelegate, ) @@ -1613,20 +1616,19 @@ class ProcessInstanceProcessor: save: bool = False, 
execution_strategy_name: Optional[str] = None, ) -> None: - """Do_engine_steps.""" - # NOTE: Commenting out to test how this changes performance: - # def spiff_step_details_mapping_builder( - # task: SpiffTask, start: float, end: float - # ) -> dict: - # self._script_engine.environment.revise_state_with_task_data(task) - # return self.spiff_step_details_mapping(task, start, end) - # - # step_delegate = StepDetailLoggingDelegate( - # self.increment_spiff_step, spiff_step_details_mapping_builder - # ) + # NOTE: To avoid saving spiff step details, just comment out this function and the step_delegate and + # set the TaskModelSavingDelegate's secondary_engine_step_delegate to None. + def spiff_step_details_mapping_builder( + task: SpiffTask, start: float, end: float + ) -> dict: + self._script_engine.environment.revise_state_with_task_data(task) + return self.spiff_step_details_mapping(task, start, end) + + step_delegate = StepDetailLoggingDelegate( + self.increment_spiff_step, spiff_step_details_mapping_builder + ) task_model_delegate = TaskModelSavingDelegate( - # secondary_engine_step_delegate=step_delegate, - secondary_engine_step_delegate=None, + secondary_engine_step_delegate=step_delegate, serializer=self._serializer, process_instance=self.process_instance_model, ) From 68e886a324003f7cb7aba541cb4cc5485d53cb4c Mon Sep 17 00:00:00 2001 From: burnettk Date: Tue, 14 Mar 2023 18:10:49 -0400 Subject: [PATCH 010/162] exclude connexion logging even in debug mode --- .../services/logging_service.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py index 401d071e..77adeaf3 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py @@ -161,6 +161,9 @@ def setup_logger(app: Flask) -> None: spiff_logger_filehandler.setLevel(spiff_log_level) spiff_logger_filehandler.setFormatter(log_formatter) + # these loggers have been deemed too verbose to be useful + garbage_loggers_to_exclude = ["connexion"] + # make all loggers act the same for name in logging.root.manager.loggerDict: # use a regex so spiffworkflow_backend isn't filtered out @@ -172,10 +175,15 @@ def setup_logger(app: Flask) -> None: the_logger.propagate = False the_logger.addHandler(spiff_logger_filehandler) else: - if len(the_logger.handlers) < 1: - # it's very verbose, so only add handlers for the obscure loggers when log level is DEBUG - if upper_log_level_string == "DEBUG": - the_logger.addHandler(logging.StreamHandler(sys.stdout)) + # it's very verbose, so only add handlers for the obscure loggers when log level is DEBUG + if upper_log_level_string == "DEBUG": + if len(the_logger.handlers) < 1: + exclude_logger_name_from_logging = False + for garbage_logger in garbage_loggers_to_exclude: + if name.startswith(garbage_logger): + exclude_logger_name_from_logging = True + if not exclude_logger_name_from_logging: + the_logger.addHandler(logging.StreamHandler(sys.stdout)) for the_handler in the_logger.handlers: the_handler.setFormatter(log_formatter) the_handler.setLevel(log_level) From 190e02dd65f92630cee8e1180183bfd66adebb01 Mon Sep 17 00:00:00 2001 From: jbirddog <100367399+jbirddog@users.noreply.github.com> Date: Tue, 14 Mar 2023 20:22:57 -0400 Subject: [PATCH 011/162] Safe asserts (#180) --- .../services/assertion_service.py | 18 
++++++++++++++++++ .../services/workflow_execution_service.py | 15 +++++++++------ 2 files changed, 27 insertions(+), 6 deletions(-) create mode 100644 spiffworkflow-backend/src/spiffworkflow_backend/services/assertion_service.py diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/assertion_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/assertion_service.py new file mode 100644 index 00000000..b9f7c61b --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/assertion_service.py @@ -0,0 +1,18 @@ +"""Assertion_service.""" +import contextlib +from typing import Generator + +import sentry_sdk +from flask import current_app + + +@contextlib.contextmanager +def safe_assertion(condition: bool) -> Generator[bool, None, None]: + try: + yield True + except AssertionError as e: + if not condition: + sentry_sdk.capture_exception(e) + current_app.logger.exception(e) + if current_app.config["ENV_IDENTIFIER"] == "local_development": + raise e diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py index 1ab22ee4..386f2054 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py @@ -4,7 +4,6 @@ from typing import Callable from typing import List from typing import Optional -from flask import current_app from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer # type: ignore from SpiffWorkflow.bpmn.workflow import BpmnWorkflow # type: ignore from SpiffWorkflow.exceptions import SpiffWorkflowException # type: ignore @@ -20,6 +19,7 @@ from spiffworkflow_backend.models.message_instance_correlation import ( from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel from spiffworkflow_backend.models.task import TaskModel # noqa: F401 +from spiffworkflow_backend.services.assertion_service import safe_assertion from spiffworkflow_backend.services.process_instance_lock_service import ( ProcessInstanceLockService, ) @@ -290,11 +290,14 @@ class WorkflowExecutionService: def do_engine_steps(self, exit_at: None = None, save: bool = False) -> None: """Do_engine_steps.""" - if not ProcessInstanceLockService.has_lock(self.process_instance_model.id): - # TODO: can't be an exception yet - believe there are flows that are not locked. - current_app.logger.error( - "The current thread has not obtained a lock for this process instance.", - ) + with safe_assertion( + ProcessInstanceLockService.has_lock(self.process_instance_model.id) + ) as tripped: + if tripped: + raise AssertionError( + "The current thread has not obtained a lock for this process" + f" instance ({self.process_instance_model.id})." 
+ ) try: self.bpmn_process_instance.refresh_waiting_tasks() From 34083f437d36d472267bfd67d5e717fa207dcea3 Mon Sep 17 00:00:00 2001 From: jbirddog <100367399+jbirddog@users.noreply.github.com> Date: Wed, 15 Mar 2023 10:52:06 -0400 Subject: [PATCH 012/162] Provide more details in process instance locking errors (#181) --- .../routes/process_instances_controller.py | 18 ++++++++++++++++-- .../services/process_instance_queue_service.py | 15 ++++++++++++--- 2 files changed, 28 insertions(+), 5 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index 252b9264..ec3015dd 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -55,9 +55,18 @@ from spiffworkflow_backend.services.error_handling_service import ErrorHandlingS from spiffworkflow_backend.services.git_service import GitCommandError from spiffworkflow_backend.services.git_service import GitService from spiffworkflow_backend.services.message_service import MessageService +from spiffworkflow_backend.services.process_instance_lock_service import ( + ProcessInstanceLockService, +) from spiffworkflow_backend.services.process_instance_processor import ( ProcessInstanceProcessor, ) +from spiffworkflow_backend.services.process_instance_queue_service import ( + ProcessInstanceIsAlreadyLockedError, +) +from spiffworkflow_backend.services.process_instance_queue_service import ( + ProcessInstanceIsNotEnqueuedError, +) from spiffworkflow_backend.services.process_instance_queue_service import ( ProcessInstanceQueueService, ) @@ -129,7 +138,11 @@ def process_instance_run( try: processor.lock_process_instance("Web") processor.do_engine_steps(save=True) - except ApiError as e: + except ( + ApiError, + ProcessInstanceIsNotEnqueuedError, + ProcessInstanceIsAlreadyLockedError, + ) as e: ErrorHandlingService().handle_error(processor, e) raise e except Exception as e: @@ -143,7 +156,8 @@ def process_instance_run( task=task, ) from e finally: - processor.unlock_process_instance("Web") + if ProcessInstanceLockService.has_lock(process_instance.id): + processor.unlock_process_instance("Web") if not current_app.config["SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER"]: MessageService.correlate_all_message_instances() diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_queue_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_queue_service.py index d9f900b2..d75d903f 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_queue_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_queue_service.py @@ -14,6 +14,10 @@ from spiffworkflow_backend.services.process_instance_lock_service import ( ) +class ProcessInstanceIsNotEnqueuedError(Exception): + pass + + class ProcessInstanceIsAlreadyLockedError(Exception): pass @@ -62,15 +66,20 @@ class ProcessInstanceQueueService: db.session.query(ProcessInstanceQueueModel) .filter( ProcessInstanceQueueModel.process_instance_id == process_instance.id, - ProcessInstanceQueueModel.locked_by == locked_by, ) .first() ) if queue_entry is None: + raise ProcessInstanceIsNotEnqueuedError( + f"{locked_by} cannot lock process instance {process_instance.id}. It" + " has not been enqueued." 
+            )
+
+        if queue_entry.locked_by != locked_by:
             raise ProcessInstanceIsAlreadyLockedError(
-                f"Cannot lock process instance {process_instance.id}. "
-                "It has already been locked or has not been enqueued."
+                f"{locked_by} cannot lock process instance {process_instance.id}. "
+                f"It has already been locked by {queue_entry.locked_by}."
             )

         ProcessInstanceLockService.lock(process_instance.id, queue_entry)

From 9d662f9facbc07741ef45ff2d643604b697e7b9b Mon Sep 17 00:00:00 2001
From: Elizabeth Esswein
Date: Wed, 15 Mar 2023 11:10:02 -0400
Subject: [PATCH 013/162] remove unused variable

---
 .../routes/process_instances_controller.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py
index 8501960a..3aaa418f 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py
@@ -600,7 +600,7 @@ def process_instance_task_list(
         last_change = step_details[-1].end_in_seconds or 0
         for spiff_task in tasks.values():
             restore_task(spiff_task, last_change)
-        for spiff_task_id, subprocess in subprocesses.items():
+        for subprocess in subprocesses.values():
             for spiff_task in subprocess["tasks"].values():
                 restore_task(spiff_task, last_change)

From 6abc3dc69dcf5ef6acfb1b76ea5f09938e33fcce Mon Sep 17 00:00:00 2001
From: jasquat
Date: Wed, 15 Mar 2023 11:25:15 -0400
Subject: [PATCH 014/162] unit tests are passing w/ burnettk

---
 spiffworkflow-backend/migrations/env.py | 2 +
 .../{389800c352ee_.py => 99f1b5156b06_.py} | 70 ++++++++++-----
 .../migrations/versions/e2972eaf8469_.py | 58 ------------
 .../src/spiffworkflow_backend/models/task.py | 14 ++-
 .../services/process_instance_processor.py | 88 ++++++++++++++++---
 .../services/task_service.py | 68 ++++++++++----
 .../services/workflow_execution_service.py | 10 ++-
 ...ling.py => test_error_handling_service.py} | 1 +
 .../unit/test_process_instance_processor.py | 18 ++++
 9 files changed, 211 insertions(+), 118 deletions(-)
 rename spiffworkflow-backend/migrations/versions/{389800c352ee_.py => 99f1b5156b06_.py} (92%)
 delete mode 100644 spiffworkflow-backend/migrations/versions/e2972eaf8469_.py
 rename spiffworkflow-backend/tests/spiffworkflow_backend/unit/{test_error_handling.py => test_error_handling_service.py} (98%)

diff --git a/spiffworkflow-backend/migrations/env.py b/spiffworkflow-backend/migrations/env.py
index 630e381a..68feded2 100644
--- a/spiffworkflow-backend/migrations/env.py
+++ b/spiffworkflow-backend/migrations/env.py
@@ -1,3 +1,5 @@
+from __future__ import with_statement
+
 import logging
 from logging.config import fileConfig

diff --git a/spiffworkflow-backend/migrations/versions/389800c352ee_.py b/spiffworkflow-backend/migrations/versions/99f1b5156b06_.py
similarity index 92%
rename from spiffworkflow-backend/migrations/versions/389800c352ee_.py
rename to spiffworkflow-backend/migrations/versions/99f1b5156b06_.py
index bfcf5da8..9407aeaf 100644
--- a/spiffworkflow-backend/migrations/versions/389800c352ee_.py
+++ b/spiffworkflow-backend/migrations/versions/99f1b5156b06_.py
@@ -1,8 +1,8 @@
 """empty message

-Revision ID: 389800c352ee
+Revision ID: 99f1b5156b06
 Revises:
-Create Date: 2023-03-07 10:40:43.709777
+Create Date: 2023-03-14 17:23:22.667853

 """
 from alembic import op
 import sqlalchemy as sa
 from sqlalchemy.dialects import
mysql # revision identifiers, used by Alembic. -revision = '389800c352ee' +revision = '99f1b5156b06' down_revision = None branch_labels = None depends_on = None @@ -166,8 +166,6 @@ def upgrade(): sa.Column('bpmn_version_control_type', sa.String(length=50), nullable=True), sa.Column('bpmn_version_control_identifier', sa.String(length=255), nullable=True), sa.Column('spiff_step', sa.Integer(), nullable=True), - sa.Column('locked_by', sa.String(length=80), nullable=True), - sa.Column('locked_at_in_seconds', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['bpmn_process_definition_id'], ['bpmn_process_definition.id'], ), sa.ForeignKeyConstraint(['bpmn_process_id'], ['bpmn_process.id'], ), sa.ForeignKeyConstraint(['process_initiator_id'], ['user.id'], ), @@ -207,20 +205,6 @@ def upgrade(): sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('key') ) - op.create_table('task', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('guid', sa.String(length=36), nullable=False), - sa.Column('bpmn_process_id', sa.Integer(), nullable=False), - sa.Column('state', sa.String(length=10), nullable=False), - sa.Column('properties_json', sa.JSON(), nullable=False), - sa.Column('json_data_hash', sa.String(length=255), nullable=False), - sa.Column('start_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True), - sa.Column('end_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True), - sa.ForeignKeyConstraint(['bpmn_process_id'], ['bpmn_process.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index(op.f('ix_task_guid'), 'task', ['guid'], unique=True) - op.create_index(op.f('ix_task_json_data_hash'), 'task', ['json_data_hash'], unique=False) op.create_table('task_definition', sa.Column('id', sa.Integer(), nullable=False), sa.Column('bpmn_process_definition_id', sa.Integer(), nullable=False), @@ -284,7 +268,7 @@ def upgrade(): sa.Column('payload', sa.JSON(), nullable=True), sa.Column('correlation_keys', sa.JSON(), nullable=True), sa.Column('status', sa.String(length=20), nullable=False), - sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=True), sa.Column('counterpart_id', sa.Integer(), nullable=True), sa.Column('failure_cause', sa.Text(), nullable=True), sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True), @@ -331,6 +315,23 @@ def upgrade(): sa.UniqueConstraint('process_instance_id', 'key', name='process_instance_metadata_unique') ) op.create_index(op.f('ix_process_instance_metadata_key'), 'process_instance_metadata', ['key'], unique=False) + op.create_table('process_instance_queue', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('process_instance_id', sa.Integer(), nullable=False), + sa.Column('run_at_in_seconds', sa.Integer(), nullable=True), + sa.Column('priority', sa.Integer(), nullable=True), + sa.Column('locked_by', sa.String(length=80), nullable=True), + sa.Column('locked_at_in_seconds', sa.Integer(), nullable=True), + sa.Column('status', sa.String(length=50), nullable=True), + sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True), + sa.Column('created_at_in_seconds', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_process_instance_queue_locked_at_in_seconds'), 'process_instance_queue', ['locked_at_in_seconds'], unique=False) + op.create_index(op.f('ix_process_instance_queue_locked_by'), 'process_instance_queue', ['locked_by'], unique=False) + 
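
The process_instance_queue table created above is what the enqueue/lock flow from the earlier patches in this series operates on. A minimal sketch of the claim pattern that its locked_by and locked_at_in_seconds columns support; this is illustrative only (the worker_id parameter and the model's module path are assumptions), and the real logic lives in ProcessInstanceQueueService and ProcessInstanceLockService:

    import time

    from spiffworkflow_backend.models.db import db
    from spiffworkflow_backend.models.process_instance_queue import (
        ProcessInstanceQueueModel,
    )

    def claim_queued_instance(process_instance_id: int, worker_id: str) -> bool:
        """Try to stamp an unclaimed queue row with this worker's identifier."""
        # The unique index on process_instance_id guarantees one queue row per
        # instance; the locked_by index keeps "what does this worker hold" cheap.
        rows_updated = (
            db.session.query(ProcessInstanceQueueModel)
            .filter(
                ProcessInstanceQueueModel.process_instance_id == process_instance_id,
                ProcessInstanceQueueModel.locked_by.is_(None),
            )
            .update(
                {"locked_by": worker_id, "locked_at_in_seconds": round(time.time())},
                synchronize_session=False,
            )
        )
        db.session.commit()
        return rows_updated == 1
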
op.create_index(op.f('ix_process_instance_queue_process_instance_id'), 'process_instance_queue', ['process_instance_id'], unique=True) + op.create_index(op.f('ix_process_instance_queue_status'), 'process_instance_queue', ['status'], unique=False) op.create_table('spiff_step_details', sa.Column('id', sa.Integer(), nullable=False), sa.Column('process_instance_id', sa.Integer(), nullable=False), @@ -346,6 +347,24 @@ def upgrade(): sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('process_instance_id', 'spiff_step', name='process_instance_id_spiff_step') ) + op.create_table('task', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('guid', sa.String(length=36), nullable=False), + sa.Column('bpmn_process_id', sa.Integer(), nullable=False), + sa.Column('process_instance_id', sa.Integer(), nullable=False), + sa.Column('task_definition_id', sa.Integer(), nullable=False), + sa.Column('state', sa.String(length=10), nullable=False), + sa.Column('properties_json', sa.JSON(), nullable=False), + sa.Column('json_data_hash', sa.String(length=255), nullable=False), + sa.Column('start_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True), + sa.Column('end_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True), + sa.ForeignKeyConstraint(['bpmn_process_id'], ['bpmn_process.id'], ), + sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ), + sa.ForeignKeyConstraint(['task_definition_id'], ['task_definition.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_task_guid'), 'task', ['guid'], unique=True) + op.create_index(op.f('ix_task_json_data_hash'), 'task', ['json_data_hash'], unique=False) op.create_table('human_task_user', sa.Column('id', sa.Integer(), nullable=False), sa.Column('human_task_id', sa.Integer(), nullable=False), @@ -379,7 +398,15 @@ def downgrade(): op.drop_index(op.f('ix_human_task_user_user_id'), table_name='human_task_user') op.drop_index(op.f('ix_human_task_user_human_task_id'), table_name='human_task_user') op.drop_table('human_task_user') + op.drop_index(op.f('ix_task_json_data_hash'), table_name='task') + op.drop_index(op.f('ix_task_guid'), table_name='task') + op.drop_table('task') op.drop_table('spiff_step_details') + op.drop_index(op.f('ix_process_instance_queue_status'), table_name='process_instance_queue') + op.drop_index(op.f('ix_process_instance_queue_process_instance_id'), table_name='process_instance_queue') + op.drop_index(op.f('ix_process_instance_queue_locked_by'), table_name='process_instance_queue') + op.drop_index(op.f('ix_process_instance_queue_locked_at_in_seconds'), table_name='process_instance_queue') + op.drop_table('process_instance_queue') op.drop_index(op.f('ix_process_instance_metadata_key'), table_name='process_instance_metadata') op.drop_table('process_instance_metadata') op.drop_index(op.f('ix_process_instance_file_data_digest'), table_name='process_instance_file_data') @@ -392,9 +419,6 @@ def downgrade(): op.drop_table('user_group_assignment') op.drop_index(op.f('ix_task_definition_bpmn_identifier'), table_name='task_definition') op.drop_table('task_definition') - op.drop_index(op.f('ix_task_json_data_hash'), table_name='task') - op.drop_index(op.f('ix_task_guid'), table_name='task') - op.drop_table('task') op.drop_table('secret') op.drop_table('refresh_token') op.drop_index(op.f('ix_process_instance_report_identifier'), table_name='process_instance_report') diff --git a/spiffworkflow-backend/migrations/versions/e2972eaf8469_.py b/spiffworkflow-backend/migrations/versions/e2972eaf8469_.py 
deleted file mode 100644 index f1796bfb..00000000 --- a/spiffworkflow-backend/migrations/versions/e2972eaf8469_.py +++ /dev/null @@ -1,58 +0,0 @@ -"""empty message - -Revision ID: e2972eaf8469 -Revises: 389800c352ee -Create Date: 2023-03-13 22:00:21.579493 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import mysql - -# revision identifiers, used by Alembic. -revision = 'e2972eaf8469' -down_revision = '389800c352ee' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.create_table('process_instance_queue', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('process_instance_id', sa.Integer(), nullable=False), - sa.Column('run_at_in_seconds', sa.Integer(), nullable=True), - sa.Column('priority', sa.Integer(), nullable=True), - sa.Column('locked_by', sa.String(length=80), nullable=True), - sa.Column('locked_at_in_seconds', sa.Integer(), nullable=True), - sa.Column('status', sa.String(length=50), nullable=True), - sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True), - sa.Column('created_at_in_seconds', sa.Integer(), nullable=True), - sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index(op.f('ix_process_instance_queue_locked_at_in_seconds'), 'process_instance_queue', ['locked_at_in_seconds'], unique=False) - op.create_index(op.f('ix_process_instance_queue_locked_by'), 'process_instance_queue', ['locked_by'], unique=False) - op.create_index(op.f('ix_process_instance_queue_process_instance_id'), 'process_instance_queue', ['process_instance_id'], unique=True) - op.create_index(op.f('ix_process_instance_queue_status'), 'process_instance_queue', ['status'], unique=False) - op.alter_column('message_instance', 'user_id', - existing_type=mysql.INTEGER(), - nullable=True) - op.drop_column('process_instance', 'locked_by') - op.drop_column('process_instance', 'locked_at_in_seconds') - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.add_column('process_instance', sa.Column('locked_at_in_seconds', mysql.INTEGER(), autoincrement=False, nullable=True)) - op.add_column('process_instance', sa.Column('locked_by', mysql.VARCHAR(length=80), nullable=True)) - op.alter_column('message_instance', 'user_id', - existing_type=mysql.INTEGER(), - nullable=False) - op.drop_index(op.f('ix_process_instance_queue_status'), table_name='process_instance_queue') - op.drop_index(op.f('ix_process_instance_queue_process_instance_id'), table_name='process_instance_queue') - op.drop_index(op.f('ix_process_instance_queue_locked_by'), table_name='process_instance_queue') - op.drop_index(op.f('ix_process_instance_queue_locked_at_in_seconds'), table_name='process_instance_queue') - op.drop_table('process_instance_queue') - # ### end Alembic commands ### diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py index b35c8759..14746d6f 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py @@ -1,5 +1,8 @@ """Task.""" import enum + +from sqlalchemy.orm import relationship +from spiffworkflow_backend.models.task_definition import TaskDefinitionModel from dataclasses import dataclass from typing import Any from typing import Optional @@ -45,11 +48,16 @@ class TaskModel(SpiffworkflowBaseDBModel): bpmn_process_id: int = db.Column( ForeignKey(BpmnProcessModel.id), nullable=False # type: ignore ) + process_instance_id: int = db.Column( + ForeignKey("process_instance.id"), nullable=False + ) # find this by looking up the "workflow_name" and "task_spec" from the properties_json - # task_definition_id: int = db.Column( - # ForeignKey(TaskDefinitionModel.id), nullable=False # type: ignore - # ) + task_definition_id: int = db.Column( + ForeignKey(TaskDefinitionModel.id), nullable=False # type: ignore + ) + task_definition = relationship("TaskDefinitionModel") + state: str = db.Column(db.String(10), nullable=False) properties_json: dict = db.Column(db.JSON, nullable=False) json_data_hash: str = db.Column(db.String(255), nullable=False, index=True) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index dd2b405a..b50ece2f 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -53,6 +53,7 @@ from SpiffWorkflow.task import TaskState from SpiffWorkflow.util.deep_merge import DeepMerge # type: ignore from spiffworkflow_backend.exceptions.api_error import ApiError +from spiffworkflow_backend.models import task_definition from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel from spiffworkflow_backend.models.bpmn_process_definition import ( BpmnProcessDefinitionModel, @@ -457,6 +458,15 @@ class ProcessInstanceProcessor: self.process_model_service = ProcessModelService() bpmn_process_spec = None self.full_bpmn_process_dict = {} + + # this caches the bpmn_process_definition_identifier and task_identifier back to the bpmn_process_id + # in the database. This is to cut down on database queries while adding new tasks to the database. 
+ # Structure: + # { "bpmn_process_definition_identifier": { "task_identifier": bpmn_process_id } } + # To use from a spiff_task: + # [spiff_task.workflow.spec.name][spiff_task.task_spec.name] + self.bpmn_definition_identifiers_to_bpmn_process_id_mappings = {} + subprocesses: Optional[IdToBpmnProcessSpecMapping] = None if process_instance_model.bpmn_process_definition_id is None: ( @@ -472,7 +482,7 @@ class ProcessInstanceProcessor: ) try: - (self.bpmn_process_instance, self.full_bpmn_process_dict) = ( + (self.bpmn_process_instance, self.full_bpmn_process_dict, self.bpmn_definition_identifiers_to_bpmn_process_id_mappings) = ( self.__get_bpmn_process_instance( process_instance_model, bpmn_process_spec, @@ -537,9 +547,20 @@ class ProcessInstanceProcessor: self.bpmn_process_instance ) + @classmethod + def _update_bpmn_definition_mappings( + cls, bpmn_definition_identifiers_to_bpmn_process_id_mappings: dict, bpmn_process_definition_identifier: str, task_definition: TaskDefinitionModel + ) -> None: + # import pdb; pdb.set_trace() + # if bpmn_process_definition_identifier == 'test_process_to_call' and task_definition.bpmn_identifier == "Root": + # import pdb; pdb.set_trace() + if bpmn_process_definition_identifier not in bpmn_definition_identifiers_to_bpmn_process_id_mappings: + bpmn_definition_identifiers_to_bpmn_process_id_mappings[bpmn_process_definition_identifier] = {} + bpmn_definition_identifiers_to_bpmn_process_id_mappings[bpmn_process_definition_identifier][task_definition.bpmn_identifier] = task_definition + @classmethod def _get_definition_dict_for_bpmn_process_definition( - cls, bpmn_process_definition: BpmnProcessDefinitionModel + cls, bpmn_process_definition: BpmnProcessDefinitionModel, bpmn_definition_identifiers_to_bpmn_process_id_mappings: dict ) -> dict: task_definitions = TaskDefinitionModel.query.filter_by( bpmn_process_definition_id=bpmn_process_definition.id @@ -550,6 +571,7 @@ class ProcessInstanceProcessor: bpmn_process_definition_dict["task_specs"][ task_definition.bpmn_identifier ] = task_definition.properties_json + cls._update_bpmn_definition_mappings(bpmn_definition_identifiers_to_bpmn_process_id_mappings, bpmn_process_definition.bpmn_identifier, task_definition) return bpmn_process_definition_dict @classmethod @@ -557,6 +579,7 @@ class ProcessInstanceProcessor: cls, bpmn_process_definition: BpmnProcessDefinitionModel, spiff_bpmn_process_dict: dict, + bpmn_definition_identifiers_to_bpmn_process_id_mappings: dict, ) -> None: # find all child subprocesses of a process bpmn_process_subprocess_definitions = ( @@ -595,6 +618,7 @@ class ProcessInstanceProcessor: task_definition.bpmn_process_definition_id ] ) + cls._update_bpmn_definition_mappings(bpmn_definition_identifiers_to_bpmn_process_id_mappings, bpmn_subprocess_definition_bpmn_identifier, task_definition) spiff_bpmn_process_dict["subprocess_specs"][ bpmn_subprocess_definition_bpmn_identifier ]["task_specs"][ @@ -643,7 +667,7 @@ class ProcessInstanceProcessor: @classmethod def _get_full_bpmn_process_dict( - cls, process_instance_model: ProcessInstanceModel + cls, process_instance_model: ProcessInstanceModel, bpmn_definition_identifiers_to_bpmn_process_id_mappings: dict ) -> dict: if process_instance_model.bpmn_process_definition_id is None: return {} @@ -658,11 +682,11 @@ class ProcessInstanceProcessor: if bpmn_process_definition is not None: spiff_bpmn_process_dict["spec"] = ( cls._get_definition_dict_for_bpmn_process_definition( - bpmn_process_definition + bpmn_process_definition, 
bpmn_definition_identifiers_to_bpmn_process_id_mappings ) ) cls._set_definition_dict_for_bpmn_subprocess_definitions( - bpmn_process_definition, spiff_bpmn_process_dict + bpmn_process_definition, spiff_bpmn_process_dict, bpmn_definition_identifiers_to_bpmn_process_id_mappings ) bpmn_process = process_instance_model.bpmn_process @@ -729,8 +753,10 @@ class ProcessInstanceProcessor: spec: Optional[BpmnProcessSpec] = None, validate_only: bool = False, subprocesses: Optional[IdToBpmnProcessSpecMapping] = None, - ) -> BpmnWorkflow: + ) -> Tuple[BpmnWorkflow, dict, dict]: full_bpmn_process_dict = {} + bpmn_definition_identifiers_to_bpmn_process_id_mappings = {} + print("GET BPMN PROCESS INSTANCE") if process_instance_model.bpmn_process_definition_id is not None: # turn off logging to avoid duplicated spiff logs spiff_logger = logging.getLogger("spiff") @@ -740,9 +766,10 @@ class ProcessInstanceProcessor: try: full_bpmn_process_dict = ( ProcessInstanceProcessor._get_full_bpmn_process_dict( - process_instance_model + process_instance_model, bpmn_definition_identifiers_to_bpmn_process_id_mappings ) ) + print("WE GOT FULL BPMN PROCESS DICT") bpmn_process_instance = ( ProcessInstanceProcessor._serializer.workflow_from_dict( full_bpmn_process_dict @@ -755,15 +782,17 @@ class ProcessInstanceProcessor: ProcessInstanceProcessor.set_script_engine(bpmn_process_instance) else: + print("WE NO HAVE FULL BPMN YET") bpmn_process_instance = ( ProcessInstanceProcessor.get_bpmn_process_instance_from_workflow_spec( spec, subprocesses ) ) + # import pdb; pdb.set_trace() bpmn_process_instance.data[ ProcessInstanceProcessor.VALIDATION_PROCESS_KEY ] = validate_only - return (bpmn_process_instance, full_bpmn_process_dict) + return (bpmn_process_instance, full_bpmn_process_dict, bpmn_definition_identifiers_to_bpmn_process_id_mappings) def slam_in_data(self, data: dict) -> None: """Slam_in_data.""" @@ -1025,6 +1054,7 @@ class ProcessInstanceProcessor: self, process_bpmn_properties: dict, bpmn_process_definition_parent: Optional[BpmnProcessDefinitionModel] = None, + store_bpmn_definition_mappings: bool = False, ) -> BpmnProcessDefinitionModel: process_bpmn_identifier = process_bpmn_properties["name"] new_hash_digest = sha256( @@ -1033,7 +1063,16 @@ class ProcessInstanceProcessor: bpmn_process_definition: Optional[BpmnProcessDefinitionModel] = ( BpmnProcessDefinitionModel.query.filter_by(hash=new_hash_digest).first() ) + print(f"process_bpmn_properties: {process_bpmn_properties}") + # import pdb; pdb.set_trace() + # if process_bpmn_identifier == "test_process_to_call": + # import pdb; pdb.set_trace() + # print("HEY22") + + print(f"self.process_instance_model.id: {self.process_instance_model.id}") if bpmn_process_definition is None: + # import pdb; pdb.set_trace() + print("NO DEFINITION") task_specs = process_bpmn_properties.pop("task_specs") bpmn_process_definition = BpmnProcessDefinitionModel( hash=new_hash_digest, @@ -1050,6 +1089,14 @@ class ProcessInstanceProcessor: typename=task_bpmn_properties["typename"], ) db.session.add(task_definition) + if store_bpmn_definition_mappings: + self._update_bpmn_definition_mappings(self.bpmn_definition_identifiers_to_bpmn_process_id_mappings, process_bpmn_identifier, task_definition) + elif store_bpmn_definition_mappings: + # this should only ever happen when new process instances use a pre-existing bpmn process definitions + # otherwise this should get populated on processor initialization + task_definitions = 
TaskDefinitionModel.query.filter_by(bpmn_process_definition_id=bpmn_process_definition.id).all() + for task_definition in task_definitions: + self._update_bpmn_definition_mappings(self.bpmn_definition_identifiers_to_bpmn_process_id_mappings, process_bpmn_identifier, task_definition) if bpmn_process_definition_parent is not None: bpmn_process_definition_relationship = ( @@ -1067,13 +1114,17 @@ class ProcessInstanceProcessor: return bpmn_process_definition def _add_bpmn_process_definitions(self, bpmn_spec_dict: dict) -> None: + # store only if mappings is currently empty. this also would mean this is a new instance that has never saved before + print("WE STORE BPM PROCESS DEF") + store_bpmn_definition_mappings = not self.bpmn_definition_identifiers_to_bpmn_process_id_mappings bpmn_process_definition_parent = self._store_bpmn_process_definition( - bpmn_spec_dict["spec"] + bpmn_spec_dict["spec"], store_bpmn_definition_mappings=store_bpmn_definition_mappings ) for process_bpmn_properties in bpmn_spec_dict["subprocess_specs"].values(): self._store_bpmn_process_definition( - process_bpmn_properties, bpmn_process_definition_parent + process_bpmn_properties, bpmn_process_definition_parent, store_bpmn_definition_mappings=store_bpmn_definition_mappings ) + # import pdb; pdb.set_trace() self.process_instance_model.bpmn_process_definition = ( bpmn_process_definition_parent ) @@ -1083,7 +1134,8 @@ class ProcessInstanceProcessor: Expects the save method to commit it. """ - bpmn_dict = json.loads(self.serialize()) + print("WE SAVE THINGS") + bpmn_dict = self.serialize() bpmn_dict_keys = ("spec", "subprocess_specs", "serializer_version") process_instance_data_dict = {} bpmn_spec_dict = {} @@ -1096,11 +1148,14 @@ class ProcessInstanceProcessor: # FIXME: always save new hash until we get updated Spiff without loopresettask # if self.process_instance_model.bpmn_process_definition_id is None: self._add_bpmn_process_definitions(bpmn_spec_dict) + # import pdb; pdb.set_trace() + print("WE NOW STORE BPMN PROCESS STUFFS") + print(f"bpmn_definition_identifiers_to_bpmn_process_id_mappings: {self.bpmn_definition_identifiers_to_bpmn_process_id_mappings}") subprocesses = process_instance_data_dict.pop("subprocesses") bpmn_process_parent, new_task_models, new_json_data_dicts = ( TaskService.add_bpmn_process( - process_instance_data_dict, self.process_instance_model + process_instance_data_dict, self.process_instance_model, bpmn_definition_identifiers_to_bpmn_process_id_mappings=self.bpmn_definition_identifiers_to_bpmn_process_id_mappings, spiff_workflow=self.bpmn_process_instance ) ) for subprocess_task_id, subprocess_properties in subprocesses.items(): @@ -1113,6 +1168,8 @@ class ProcessInstanceProcessor: self.process_instance_model, bpmn_process_parent, bpmn_process_guid=subprocess_task_id, + bpmn_definition_identifiers_to_bpmn_process_id_mappings=self.bpmn_definition_identifiers_to_bpmn_process_id_mappings, + spiff_workflow=self.bpmn_process_instance ) new_task_models.update(subprocess_new_task_models) new_json_data_dicts.update(subprocess_new_json_data_models) @@ -1122,6 +1179,7 @@ class ProcessInstanceProcessor: def save(self) -> None: """Saves the current state of this processor to the database.""" + print("WE IN SAVE") self._add_bpmn_json_records() self.process_instance_model.spiff_serializer_version = self.SERIALIZER_VERSION @@ -1631,6 +1689,7 @@ class ProcessInstanceProcessor: secondary_engine_step_delegate=step_delegate, serializer=self._serializer, process_instance=self.process_instance_model, + 
bpmn_definition_identifiers_to_bpmn_process_id_mappings=self.bpmn_definition_identifiers_to_bpmn_process_id_mappings, ) if execution_strategy_name is None: @@ -1722,11 +1781,12 @@ class ProcessInstanceProcessor: ) ) - def serialize(self) -> str: + def serialize(self) -> dict: """Serialize.""" self.check_task_data_size() self.preserve_script_engine_state() - return self._serializer.serialize_json(self.bpmn_process_instance) # type: ignore + # return self._serializer.workflow_to_dict(self.bpmn_process_instance) # type: ignore + return json.loads(self._serializer.serialize_json(self.bpmn_process_instance)) # type: ignore def next_user_tasks(self) -> list[SpiffTask]: """Next_user_tasks.""" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index dbd0a912..f782241f 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -11,6 +11,7 @@ from SpiffWorkflow.task import Task as SpiffTask # type: ignore from SpiffWorkflow.task import TaskStateNames from sqlalchemy.dialects.mysql import insert as mysql_insert from sqlalchemy.dialects.postgresql import insert as postgres_insert +from uuid import UUID from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel from spiffworkflow_backend.models.db import db @@ -44,24 +45,13 @@ class TaskService: ) db.session.execute(on_duplicate_key_stmt) - @classmethod - def _update_task_data_on_task_model( - cls, task_model: TaskModel, task_data_dict: dict - ) -> Optional[JsonDataDict]: - task_data_json = json.dumps(task_data_dict, sort_keys=True) - task_data_hash: str = sha256(task_data_json.encode("utf8")).hexdigest() - json_data_dict: Optional[JsonDataDict] = None - if task_model.json_data_hash != task_data_hash: - json_data_dict = {"hash": task_data_hash, "data": task_data_dict} - task_model.json_data_hash = task_data_hash - return json_data_dict - @classmethod def update_task_model( cls, task_model: TaskModel, spiff_task: SpiffTask, serializer: BpmnWorkflowSerializer, + bpmn_definition_identifiers_to_bpmn_process_id_mappings: Optional[dict] = None, ) -> Optional[JsonDataDict]: """Updates properties_json and data on given task_model. 
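
Both task_service helpers in this hunk funnel task data through the same content-addressed json_data storage used by _update_task_data_on_task_model later in this file. A self-contained sketch of that hashing pattern (standalone Python, no app imports):

    import json
    from hashlib import sha256

    def json_data_hash_and_dict(task_data: dict) -> tuple:
        # Serialize with sorted keys so logically equal payloads always produce
        # the same hash, then key the shared json_data row by that sha256 digest.
        task_data_json = json.dumps(task_data, sort_keys=True)
        data_hash = sha256(task_data_json.encode("utf8")).hexdigest()
        return (data_hash, {"hash": data_hash, "data": task_data})

    hash_a, _ = json_data_hash_and_dict({"x": 1, "y": 2})
    hash_b, _ = json_data_hash_and_dict({"y": 2, "x": 1})
    assert hash_a == hash_b  # key order does not change the stored row
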
@@ -83,6 +73,7 @@ class TaskService: spiff_task: SpiffTask, process_instance: ProcessInstanceModel, serializer: BpmnWorkflowSerializer, + bpmn_definition_identifiers_to_bpmn_process_id_mappings: Optional[dict] = None, ) -> Tuple[ Optional[BpmnProcessModel], TaskModel, @@ -98,12 +89,13 @@ class TaskService: new_json_data_dicts: dict[str, JsonDataDict] = {} if task_model is None: bpmn_process, new_task_models, new_json_data_dicts = cls.task_bpmn_process( - spiff_task, process_instance, serializer + spiff_task, process_instance, serializer, bpmn_definition_identifiers_to_bpmn_process_id_mappings=bpmn_definition_identifiers_to_bpmn_process_id_mappings ) task_model = TaskModel.query.filter_by(guid=spiff_task_guid).first() + task_definition = bpmn_definition_identifiers_to_bpmn_process_id_mappings[spiff_task.workflow.spec.name][spiff_task.task_spec.name] if task_model is None: task_model = TaskModel( - guid=spiff_task_guid, bpmn_process_id=bpmn_process.id + guid=spiff_task_guid, bpmn_process_id=bpmn_process.id, process_instance_id=process_instance.id, task_definition_id=task_definition.id ) return (bpmn_process, task_model, new_task_models, new_json_data_dicts) @@ -130,6 +122,7 @@ class TaskService: spiff_task: SpiffTask, process_instance: ProcessInstanceModel, serializer: BpmnWorkflowSerializer, + bpmn_definition_identifiers_to_bpmn_process_id_mappings: Optional[dict] = None, ) -> Tuple[BpmnProcessModel, dict[str, TaskModel], dict[str, JsonDataDict]]: subprocess_guid, subprocess = cls.task_subprocess(spiff_task) bpmn_process: Optional[BpmnProcessModel] = None @@ -140,12 +133,15 @@ class TaskService: # This is the top level workflow, which has no guid # check for bpmn_process_id because mypy doesn't realize bpmn_process can be None if process_instance.bpmn_process_id is None: + spiff_workflow = spiff_task.workflow._get_outermost_workflow() bpmn_process, new_task_models, new_json_data_dicts = ( cls.add_bpmn_process( serializer.workflow_to_dict( - spiff_task.workflow._get_outermost_workflow() + spiff_workflow ), process_instance, + bpmn_definition_identifiers_to_bpmn_process_id_mappings=bpmn_definition_identifiers_to_bpmn_process_id_mappings, + spiff_workflow=spiff_workflow, ) ) else: @@ -153,12 +149,16 @@ class TaskService: guid=subprocess_guid ).first() if bpmn_process is None: + spiff_workflow = spiff_task.workflow bpmn_process, new_task_models, new_json_data_dicts = ( cls.add_bpmn_process( serializer.workflow_to_dict(subprocess), process_instance, process_instance.bpmn_process, subprocess_guid, + bpmn_definition_identifiers_to_bpmn_process_id_mappings=bpmn_definition_identifiers_to_bpmn_process_id_mappings, + spiff_workflow=spiff_workflow, + ) ) return (bpmn_process, new_task_models, new_json_data_dicts) @@ -170,6 +170,8 @@ class TaskService: process_instance: ProcessInstanceModel, bpmn_process_parent: Optional[BpmnProcessModel] = None, bpmn_process_guid: Optional[str] = None, + bpmn_definition_identifiers_to_bpmn_process_id_mappings: Optional[dict] = None, + spiff_workflow: Optional[BpmnWorkflow] = None, ) -> Tuple[BpmnProcessModel, dict[str, TaskModel], dict[str, JsonDataDict]]: """This creates and adds a bpmn_process to the Db session. 
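
The optional mappings dict threaded through these signatures is the nested cache the processor builds from bpmn process definition identifiers to task definitions. A small sketch of how such a cache is built and consulted, using hypothetical helper names (the real code does this inline in ProcessInstanceProcessor._update_bpmn_definition_mappings and TaskService):

    def cache_task_definition(mappings: dict, process_identifier: str, task_definition) -> None:
        # Two levels: the bpmn process definition identifier first, then the
        # task's own bpmn identifier, mirroring _update_bpmn_definition_mappings.
        mappings.setdefault(process_identifier, {})[
            task_definition.bpmn_identifier
        ] = task_definition

    def cached_task_definition(mappings: dict, spiff_task):
        # The documented lookup: [workflow.spec.name][task_spec.name], which
        # avoids a TaskDefinitionModel query for every task that gets saved.
        return mappings[spiff_task.workflow.spec.name][spiff_task.task_spec.name]
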
@@ -183,6 +185,7 @@ class TaskService: new_json_data_dicts: dict[str, JsonDataDict] = {} bpmn_process = None + print("ADD BPMN PROCESS") if bpmn_process_parent is not None: bpmn_process = BpmnProcessModel.query.filter_by( parent_process_id=bpmn_process_parent.id, guid=bpmn_process_guid @@ -194,6 +197,9 @@ class TaskService: if bpmn_process is None: bpmn_process_is_new = True bpmn_process = BpmnProcessModel(guid=bpmn_process_guid) + for task_id, task_properties in tasks.items(): + if task_properties['task_spec'] == 'Start': + bpmn_process_dict['root'] = task_id bpmn_process.properties_json = bpmn_process_dict @@ -202,6 +208,7 @@ class TaskService: bpmn_process_data_json.encode("utf8") ).hexdigest() if bpmn_process.json_data_hash != bpmn_process_data_hash: + # print(f"bpmn_process_data_dict: {bpmn_process_data_dict}") new_json_data_dicts[bpmn_process_data_hash] = { "hash": bpmn_process_data_hash, "data": bpmn_process_data_dict, @@ -219,6 +226,16 @@ class TaskService: if bpmn_process_is_new: for task_id, task_properties in tasks.items(): + if task_properties['task_spec'] == 'Root': + continue + if task_properties['task_spec'] == 'Start': + task_properties['parent'] = None + process_dict = bpmn_process.properties_json + process_dict['root'] = task_id + # print(f"process_dict: {process_dict}") + bpmn_process.properties_json = process_dict + # print(f"bpmn_process.properties_json: {bpmn_process.properties_json}") + db.session.add(bpmn_process) task_data_dict = task_properties.pop("data") state_int = task_properties["state"] @@ -231,8 +248,15 @@ class TaskService: # .join(BpmnProcessDefinitionModel).filter(BpmnProcessDefinitionModel.bpmn_identifier==bpmn_process_identifier).first() # if task_definition is None: # subprocess_task = TaskModel.query.filter_by(guid=bpmn_process.guid) + spiff_task = spiff_workflow.get_task(UUID(task_id)) + try: + task_definition = bpmn_definition_identifiers_to_bpmn_process_id_mappings[spiff_task.workflow.spec.name][spiff_task.task_spec.name] + except Exception as ex: + import pdb; pdb.set_trace() + print("HEY") + raise ex task_model = TaskModel( - guid=task_id, bpmn_process_id=bpmn_process.id + guid=task_id, bpmn_process_id=bpmn_process.id, process_instance_id=process_instance.id, task_definition_id=task_definition.id ) task_model.state = TaskStateNames[state_int] task_model.properties_json = task_properties @@ -245,3 +269,15 @@ class TaskService: new_json_data_dicts[json_data_dict["hash"]] = json_data_dict return (bpmn_process, new_task_models, new_json_data_dicts) + + @classmethod + def _update_task_data_on_task_model( + cls, task_model: TaskModel, task_data_dict: dict + ) -> Optional[JsonDataDict]: + task_data_json = json.dumps(task_data_dict, sort_keys=True) + task_data_hash: str = sha256(task_data_json.encode("utf8")).hexdigest() + json_data_dict: Optional[JsonDataDict] = None + if task_model.json_data_hash != task_data_hash: + json_data_dict = {"hash": task_data_hash, "data": task_data_dict} + task_model.json_data_hash = task_data_hash + return json_data_dict diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py index 386f2054..9a4a6c7d 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py @@ -58,9 +58,11 @@ class TaskModelSavingDelegate(EngineStepDelegate): serializer: 
BpmnWorkflowSerializer, process_instance: ProcessInstanceModel, secondary_engine_step_delegate: Optional[EngineStepDelegate] = None, + bpmn_definition_identifiers_to_bpmn_process_id_mappings: Optional[dict] = None, ) -> None: self.secondary_engine_step_delegate = secondary_engine_step_delegate self.process_instance = process_instance + self.bpmn_definition_identifiers_to_bpmn_process_id_mappings = bpmn_definition_identifiers_to_bpmn_process_id_mappings self.current_task_model: Optional[TaskModel] = None self.task_models: dict[str, TaskModel] = {} @@ -78,7 +80,7 @@ class TaskModelSavingDelegate(EngineStepDelegate): if self.should_update_task_model(): _bpmn_process, task_model, new_task_models, new_json_data_dicts = ( TaskService.find_or_create_task_model_from_spiff_task( - spiff_task, self.process_instance, self.serializer + spiff_task, self.process_instance, self.serializer, bpmn_definition_identifiers_to_bpmn_process_id_mappings=self.bpmn_definition_identifiers_to_bpmn_process_id_mappings ) ) self.current_task_model = task_model @@ -92,7 +94,7 @@ class TaskModelSavingDelegate(EngineStepDelegate): if self.current_task_model and self.should_update_task_model(): self.current_task_model.end_in_seconds = time.time() json_data_dict = TaskService.update_task_model( - self.current_task_model, spiff_task, self.serializer + self.current_task_model, spiff_task, self.serializer, bpmn_definition_identifiers_to_bpmn_process_id_mappings=self.bpmn_definition_identifiers_to_bpmn_process_id_mappings ) if json_data_dict is not None: self.json_data_dicts[json_data_dict["hash"]] = json_data_dict @@ -121,13 +123,13 @@ class TaskModelSavingDelegate(EngineStepDelegate): ): _bpmn_process, task_model, new_task_models, new_json_data_dicts = ( TaskService.find_or_create_task_model_from_spiff_task( - waiting_spiff_task, self.process_instance, self.serializer + waiting_spiff_task, self.process_instance, self.serializer, bpmn_definition_identifiers_to_bpmn_process_id_mappings=self.bpmn_definition_identifiers_to_bpmn_process_id_mappings ) ) self.task_models.update(new_task_models) self.json_data_dicts.update(new_json_data_dicts) json_data_dict = TaskService.update_task_model( - task_model, waiting_spiff_task, self.serializer + task_model, waiting_spiff_task, self.serializer, bpmn_definition_identifiers_to_bpmn_process_id_mappings=self.bpmn_definition_identifiers_to_bpmn_process_id_mappings ) self.task_models[task_model.guid] = task_model if json_data_dict is not None: diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling_service.py similarity index 98% rename from spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling.py rename to spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling_service.py index 9d481788..4566625a 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling_service.py @@ -33,6 +33,7 @@ class TestErrorHandlingService(BaseTest): process_instance = ProcessInstanceService.create_process_instance_from_process_model_identifier( process_model.id, user ) + print(f"process_instance.id: {process_instance.id}") pip = ProcessInstanceProcessor(process_instance) with pytest.raises(ApiError) as e: pip.do_engine_steps(save=True) diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py 
b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py
index 3452dcf1..6bd7a305 100644
--- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py
+++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py
@@ -378,6 +378,24 @@ class TestProcessInstanceProcessor(BaseTest):
         assert len(all_spiff_tasks) > 1
         for spiff_task in all_spiff_tasks:
             assert spiff_task.state == TaskState.COMPLETED
+            if spiff_task.task_spec.name == 'test_process_to_call_script':
+                task = TaskModel.query.filter_by(guid=str(spiff_task.id)).first()
+                assert task.task_definition_id is not None
+                task_definition = task.task_definition
+                assert task_definition.bpmn_identifier == 'test_process_to_call_script'
+                assert task_definition.bpmn_process_definition.bpmn_identifier == 'test_process_to_call'
+            elif spiff_task.task_spec.name == 'top_level_subprocess_script':
+                task = TaskModel.query.filter_by(guid=str(spiff_task.id)).first()
+                assert task.task_definition_id is not None
+                task_definition = task.task_definition
+                assert task_definition.bpmn_identifier == 'top_level_subprocess_script'
+                assert task_definition.bpmn_process_definition.bpmn_identifier == 'top_level_subprocess'
+            if spiff_task.task_spec.name == 'top_level_script':
+                task = TaskModel.query.filter_by(guid=str(spiff_task.id)).first()
+                assert task.task_definition_id is not None
+                task_definition = task.task_definition
+                assert task_definition.bpmn_identifier == 'top_level_script'
+                assert task_definition.bpmn_process_definition.bpmn_identifier == 'top_level_process'
         # FIXME: Checking task data cannot work with the feature/remove-loop-reset branch
         # of SpiffWorkflow. This is because it saves script data to the python_env and NOT
         # to task.data.
We may need to either create a new column on TaskModel to put the python_env From 929f3244cad275d6c002150ebbb2638492a35662 Mon Sep 17 00:00:00 2001 From: jasquat Date: Wed, 15 Mar 2023 11:56:00 -0400 Subject: [PATCH 015/162] all tests are passing w/ burnettk --- .../models/bpmn_process.py | 3 + .../models/process_instance.py | 3 +- .../services/process_instance_processor.py | 81 ++++++++++--------- .../services/task_service.py | 59 ++++++-------- .../services/workflow_execution_service.py | 12 +-- 5 files changed, 76 insertions(+), 82 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py index 67e295e9..faae68c1 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py @@ -1,4 +1,5 @@ from __future__ import annotations +from sqlalchemy.orm import relationship from sqlalchemy import ForeignKey @@ -24,6 +25,8 @@ class BpmnProcessModel(SpiffworkflowBaseDBModel): properties_json: dict = db.Column(db.JSON, nullable=False) json_data_hash: str = db.Column(db.String(255), nullable=False, index=True) + tasks = relationship("TaskModel", cascade="delete") # type: ignore + # subprocess or top_level_process # process_type: str = db.Column(db.String(30), nullable=False) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py index f155494a..dc66c86f 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py @@ -71,7 +71,8 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel): bpmn_process_id: int | None = db.Column( ForeignKey(BpmnProcessModel.id), nullable=True # type: ignore ) - bpmn_process = relationship(BpmnProcessModel) + bpmn_process = relationship(BpmnProcessModel, cascade="delete") + tasks = relationship("TaskModel", cascade="delete") # type: ignore spiff_serializer_version = db.Column(db.String(50), nullable=True) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index b50ece2f..17180137 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -462,10 +462,10 @@ class ProcessInstanceProcessor: # this caches the bpmn_process_definition_identifier and task_identifier back to the bpmn_process_id # in the database. This is to cut down on database queries while adding new tasks to the database. 
# Structure: - # { "bpmn_process_definition_identifier": { "task_identifier": bpmn_process_id } } + # { "bpmn_process_definition_identifier": { "task_identifier": task_definition } } # To use from a spiff_task: # [spiff_task.workflow.spec.name][spiff_task.task_spec.name] - self.bpmn_definition_identifiers_to_bpmn_process_id_mappings = {} + self.bpmn_definition_to_task_definitions_mappings = {} subprocesses: Optional[IdToBpmnProcessSpecMapping] = None if process_instance_model.bpmn_process_definition_id is None: @@ -482,7 +482,7 @@ class ProcessInstanceProcessor: ) try: - (self.bpmn_process_instance, self.full_bpmn_process_dict, self.bpmn_definition_identifiers_to_bpmn_process_id_mappings) = ( + (self.bpmn_process_instance, self.full_bpmn_process_dict, self.bpmn_definition_to_task_definitions_mappings) = ( self.__get_bpmn_process_instance( process_instance_model, bpmn_process_spec, @@ -549,18 +549,18 @@ class ProcessInstanceProcessor: @classmethod def _update_bpmn_definition_mappings( - cls, bpmn_definition_identifiers_to_bpmn_process_id_mappings: dict, bpmn_process_definition_identifier: str, task_definition: TaskDefinitionModel + cls, bpmn_definition_to_task_definitions_mappings: dict, bpmn_process_definition_identifier: str, task_definition: TaskDefinitionModel ) -> None: # import pdb; pdb.set_trace() # if bpmn_process_definition_identifier == 'test_process_to_call' and task_definition.bpmn_identifier == "Root": # import pdb; pdb.set_trace() - if bpmn_process_definition_identifier not in bpmn_definition_identifiers_to_bpmn_process_id_mappings: - bpmn_definition_identifiers_to_bpmn_process_id_mappings[bpmn_process_definition_identifier] = {} - bpmn_definition_identifiers_to_bpmn_process_id_mappings[bpmn_process_definition_identifier][task_definition.bpmn_identifier] = task_definition + if bpmn_process_definition_identifier not in bpmn_definition_to_task_definitions_mappings: + bpmn_definition_to_task_definitions_mappings[bpmn_process_definition_identifier] = {} + bpmn_definition_to_task_definitions_mappings[bpmn_process_definition_identifier][task_definition.bpmn_identifier] = task_definition @classmethod def _get_definition_dict_for_bpmn_process_definition( - cls, bpmn_process_definition: BpmnProcessDefinitionModel, bpmn_definition_identifiers_to_bpmn_process_id_mappings: dict + cls, bpmn_process_definition: BpmnProcessDefinitionModel, bpmn_definition_to_task_definitions_mappings: dict ) -> dict: task_definitions = TaskDefinitionModel.query.filter_by( bpmn_process_definition_id=bpmn_process_definition.id @@ -571,7 +571,7 @@ class ProcessInstanceProcessor: bpmn_process_definition_dict["task_specs"][ task_definition.bpmn_identifier ] = task_definition.properties_json - cls._update_bpmn_definition_mappings(bpmn_definition_identifiers_to_bpmn_process_id_mappings, bpmn_process_definition.bpmn_identifier, task_definition) + cls._update_bpmn_definition_mappings(bpmn_definition_to_task_definitions_mappings, bpmn_process_definition.bpmn_identifier, task_definition) return bpmn_process_definition_dict @classmethod @@ -579,7 +579,7 @@ class ProcessInstanceProcessor: cls, bpmn_process_definition: BpmnProcessDefinitionModel, spiff_bpmn_process_dict: dict, - bpmn_definition_identifiers_to_bpmn_process_id_mappings: dict, + bpmn_definition_to_task_definitions_mappings: dict, ) -> None: # find all child subprocesses of a process bpmn_process_subprocess_definitions = ( @@ -618,7 +618,7 @@ class ProcessInstanceProcessor: task_definition.bpmn_process_definition_id ] ) - 
cls._update_bpmn_definition_mappings(bpmn_definition_identifiers_to_bpmn_process_id_mappings, bpmn_subprocess_definition_bpmn_identifier, task_definition) + cls._update_bpmn_definition_mappings(bpmn_definition_to_task_definitions_mappings, bpmn_subprocess_definition_bpmn_identifier, task_definition) spiff_bpmn_process_dict["subprocess_specs"][ bpmn_subprocess_definition_bpmn_identifier ]["task_specs"][ @@ -667,7 +667,7 @@ class ProcessInstanceProcessor: @classmethod def _get_full_bpmn_process_dict( - cls, process_instance_model: ProcessInstanceModel, bpmn_definition_identifiers_to_bpmn_process_id_mappings: dict + cls, process_instance_model: ProcessInstanceModel, bpmn_definition_to_task_definitions_mappings: dict ) -> dict: if process_instance_model.bpmn_process_definition_id is None: return {} @@ -682,11 +682,11 @@ class ProcessInstanceProcessor: if bpmn_process_definition is not None: spiff_bpmn_process_dict["spec"] = ( cls._get_definition_dict_for_bpmn_process_definition( - bpmn_process_definition, bpmn_definition_identifiers_to_bpmn_process_id_mappings + bpmn_process_definition, bpmn_definition_to_task_definitions_mappings ) ) cls._set_definition_dict_for_bpmn_subprocess_definitions( - bpmn_process_definition, spiff_bpmn_process_dict, bpmn_definition_identifiers_to_bpmn_process_id_mappings + bpmn_process_definition, spiff_bpmn_process_dict, bpmn_definition_to_task_definitions_mappings ) bpmn_process = process_instance_model.bpmn_process @@ -755,8 +755,8 @@ class ProcessInstanceProcessor: subprocesses: Optional[IdToBpmnProcessSpecMapping] = None, ) -> Tuple[BpmnWorkflow, dict, dict]: full_bpmn_process_dict = {} - bpmn_definition_identifiers_to_bpmn_process_id_mappings = {} - print("GET BPMN PROCESS INSTANCE") + bpmn_definition_to_task_definitions_mappings = {} + # print("GET BPMN PROCESS INSTANCE") if process_instance_model.bpmn_process_definition_id is not None: # turn off logging to avoid duplicated spiff logs spiff_logger = logging.getLogger("spiff") @@ -766,10 +766,10 @@ class ProcessInstanceProcessor: try: full_bpmn_process_dict = ( ProcessInstanceProcessor._get_full_bpmn_process_dict( - process_instance_model, bpmn_definition_identifiers_to_bpmn_process_id_mappings + process_instance_model, bpmn_definition_to_task_definitions_mappings ) ) - print("WE GOT FULL BPMN PROCESS DICT") + # print("WE GOT FULL BPMN PROCESS DICT") bpmn_process_instance = ( ProcessInstanceProcessor._serializer.workflow_from_dict( full_bpmn_process_dict @@ -782,7 +782,7 @@ class ProcessInstanceProcessor: ProcessInstanceProcessor.set_script_engine(bpmn_process_instance) else: - print("WE NO HAVE FULL BPMN YET") + # print("WE NO HAVE FULL BPMN YET") bpmn_process_instance = ( ProcessInstanceProcessor.get_bpmn_process_instance_from_workflow_spec( spec, subprocesses @@ -792,7 +792,7 @@ class ProcessInstanceProcessor: bpmn_process_instance.data[ ProcessInstanceProcessor.VALIDATION_PROCESS_KEY ] = validate_only - return (bpmn_process_instance, full_bpmn_process_dict, bpmn_definition_identifiers_to_bpmn_process_id_mappings) + return (bpmn_process_instance, full_bpmn_process_dict, bpmn_definition_to_task_definitions_mappings) def slam_in_data(self, data: dict) -> None: """Slam_in_data.""" @@ -1063,16 +1063,14 @@ class ProcessInstanceProcessor: bpmn_process_definition: Optional[BpmnProcessDefinitionModel] = ( BpmnProcessDefinitionModel.query.filter_by(hash=new_hash_digest).first() ) - print(f"process_bpmn_properties: {process_bpmn_properties}") + # print(f"process_bpmn_properties: {process_bpmn_properties}") # import 
pdb; pdb.set_trace() # if process_bpmn_identifier == "test_process_to_call": # import pdb; pdb.set_trace() - # print("HEY22") + # # print("HEY22") - print(f"self.process_instance_model.id: {self.process_instance_model.id}") + # print(f"self.process_instance_model.id: {self.process_instance_model.id}") if bpmn_process_definition is None: - # import pdb; pdb.set_trace() - print("NO DEFINITION") task_specs = process_bpmn_properties.pop("task_specs") bpmn_process_definition = BpmnProcessDefinitionModel( hash=new_hash_digest, @@ -1090,13 +1088,13 @@ class ProcessInstanceProcessor: ) db.session.add(task_definition) if store_bpmn_definition_mappings: - self._update_bpmn_definition_mappings(self.bpmn_definition_identifiers_to_bpmn_process_id_mappings, process_bpmn_identifier, task_definition) + self._update_bpmn_definition_mappings(self.bpmn_definition_to_task_definitions_mappings, process_bpmn_identifier, task_definition) elif store_bpmn_definition_mappings: # this should only ever happen when new process instances use a pre-existing bpmn process definitions # otherwise this should get populated on processor initialization task_definitions = TaskDefinitionModel.query.filter_by(bpmn_process_definition_id=bpmn_process_definition.id).all() for task_definition in task_definitions: - self._update_bpmn_definition_mappings(self.bpmn_definition_identifiers_to_bpmn_process_id_mappings, process_bpmn_identifier, task_definition) + self._update_bpmn_definition_mappings(self.bpmn_definition_to_task_definitions_mappings, process_bpmn_identifier, task_definition) if bpmn_process_definition_parent is not None: bpmn_process_definition_relationship = ( @@ -1115,8 +1113,8 @@ class ProcessInstanceProcessor: def _add_bpmn_process_definitions(self, bpmn_spec_dict: dict) -> None: # store only if mappings is currently empty. this also would mean this is a new instance that has never saved before - print("WE STORE BPM PROCESS DEF") - store_bpmn_definition_mappings = not self.bpmn_definition_identifiers_to_bpmn_process_id_mappings + # print("WE STORE BPM PROCESS DEF") + store_bpmn_definition_mappings = not self.bpmn_definition_to_task_definitions_mappings bpmn_process_definition_parent = self._store_bpmn_process_definition( bpmn_spec_dict["spec"], store_bpmn_definition_mappings=store_bpmn_definition_mappings ) @@ -1134,7 +1132,7 @@ class ProcessInstanceProcessor: Expects the save method to commit it. 
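# A condensed sketch of the hash-based dedup pattern used around here: hash the
# serialized process spec and only insert a new BpmnProcessDefinitionModel row
# when no row with that hash already exists. sha256/hexdigest mirror the hashing
# done elsewhere in this series; the helper name and the sort_keys serialization
# are illustrative assumptions, not taken from the codebase.
import json
from hashlib import sha256

def definition_hash(process_bpmn_properties: dict) -> str:
    # a stable serialization is required so identical specs produce identical hashes
    spec_json = json.dumps(process_bpmn_properties, sort_keys=True)
    return sha256(spec_json.encode("utf8")).hexdigest()

# usage sketch: query by hash first, create a definition row only on a miss, e.g.
# BpmnProcessDefinitionModel.query.filter_by(hash=definition_hash(spec)).first()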
""" - print("WE SAVE THINGS") + # print("WE SAVE THINGS") bpmn_dict = self.serialize() bpmn_dict_keys = ("spec", "subprocess_specs", "serializer_version") process_instance_data_dict = {} @@ -1149,13 +1147,16 @@ class ProcessInstanceProcessor: # if self.process_instance_model.bpmn_process_definition_id is None: self._add_bpmn_process_definitions(bpmn_spec_dict) # import pdb; pdb.set_trace() - print("WE NOW STORE BPMN PROCESS STUFFS") - print(f"bpmn_definition_identifiers_to_bpmn_process_id_mappings: {self.bpmn_definition_identifiers_to_bpmn_process_id_mappings}") + # print("WE NOW STORE BPMN PROCESS STUFFS") + # print(f"bpmn_definition_to_task_definitions_mappings: {self.bpmn_definition_to_task_definitions_mappings}") subprocesses = process_instance_data_dict.pop("subprocesses") bpmn_process_parent, new_task_models, new_json_data_dicts = ( TaskService.add_bpmn_process( - process_instance_data_dict, self.process_instance_model, bpmn_definition_identifiers_to_bpmn_process_id_mappings=self.bpmn_definition_identifiers_to_bpmn_process_id_mappings, spiff_workflow=self.bpmn_process_instance + bpmn_process_dict=process_instance_data_dict, + process_instance=self.process_instance_model, + bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, + spiff_workflow=self.bpmn_process_instance ) ) for subprocess_task_id, subprocess_properties in subprocesses.items(): @@ -1164,11 +1165,11 @@ class ProcessInstanceProcessor: subprocess_new_task_models, subprocess_new_json_data_models, ) = TaskService.add_bpmn_process( - subprocess_properties, - self.process_instance_model, - bpmn_process_parent, + bpmn_process_dict=subprocess_properties, + process_instance=self.process_instance_model, + bpmn_process_parent=bpmn_process_parent, bpmn_process_guid=subprocess_task_id, - bpmn_definition_identifiers_to_bpmn_process_id_mappings=self.bpmn_definition_identifiers_to_bpmn_process_id_mappings, + bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, spiff_workflow=self.bpmn_process_instance ) new_task_models.update(subprocess_new_task_models) @@ -1179,7 +1180,7 @@ class ProcessInstanceProcessor: def save(self) -> None: """Saves the current state of this processor to the database.""" - print("WE IN SAVE") + # print("WE IN SAVE") self._add_bpmn_json_records() self.process_instance_model.spiff_serializer_version = self.SERIALIZER_VERSION @@ -1307,7 +1308,7 @@ class ProcessInstanceProcessor: try: self.bpmn_process_instance.catch(event_definition) except Exception as e: - print(e) + print(e) # TODO: do_engine_steps without a lock self.do_engine_steps(save=True) @@ -1689,7 +1690,7 @@ class ProcessInstanceProcessor: secondary_engine_step_delegate=step_delegate, serializer=self._serializer, process_instance=self.process_instance_model, - bpmn_definition_identifiers_to_bpmn_process_id_mappings=self.bpmn_definition_identifiers_to_bpmn_process_id_mappings, + bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, ) if execution_strategy_name is None: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index f782241f..76a957b2 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -51,7 +51,6 @@ class TaskService: task_model: TaskModel, spiff_task: SpiffTask, serializer: BpmnWorkflowSerializer, - 
bpmn_definition_identifiers_to_bpmn_process_id_mappings: Optional[dict] = None, ) -> Optional[JsonDataDict]: """Updates properties_json and data on given task_model. @@ -73,7 +72,7 @@ class TaskService: spiff_task: SpiffTask, process_instance: ProcessInstanceModel, serializer: BpmnWorkflowSerializer, - bpmn_definition_identifiers_to_bpmn_process_id_mappings: Optional[dict] = None, + bpmn_definition_to_task_definitions_mappings: dict, ) -> Tuple[ Optional[BpmnProcessModel], TaskModel, @@ -89,11 +88,11 @@ class TaskService: new_json_data_dicts: dict[str, JsonDataDict] = {} if task_model is None: bpmn_process, new_task_models, new_json_data_dicts = cls.task_bpmn_process( - spiff_task, process_instance, serializer, bpmn_definition_identifiers_to_bpmn_process_id_mappings=bpmn_definition_identifiers_to_bpmn_process_id_mappings + spiff_task, process_instance, serializer, bpmn_definition_to_task_definitions_mappings=bpmn_definition_to_task_definitions_mappings ) task_model = TaskModel.query.filter_by(guid=spiff_task_guid).first() - task_definition = bpmn_definition_identifiers_to_bpmn_process_id_mappings[spiff_task.workflow.spec.name][spiff_task.task_spec.name] if task_model is None: + task_definition = bpmn_definition_to_task_definitions_mappings[spiff_task.workflow.spec.name][spiff_task.task_spec.name] task_model = TaskModel( guid=spiff_task_guid, bpmn_process_id=bpmn_process.id, process_instance_id=process_instance.id, task_definition_id=task_definition.id ) @@ -122,7 +121,7 @@ class TaskService: spiff_task: SpiffTask, process_instance: ProcessInstanceModel, serializer: BpmnWorkflowSerializer, - bpmn_definition_identifiers_to_bpmn_process_id_mappings: Optional[dict] = None, + bpmn_definition_to_task_definitions_mappings: dict, ) -> Tuple[BpmnProcessModel, dict[str, TaskModel], dict[str, JsonDataDict]]: subprocess_guid, subprocess = cls.task_subprocess(spiff_task) bpmn_process: Optional[BpmnProcessModel] = None @@ -136,11 +135,11 @@ class TaskService: spiff_workflow = spiff_task.workflow._get_outermost_workflow() bpmn_process, new_task_models, new_json_data_dicts = ( cls.add_bpmn_process( - serializer.workflow_to_dict( + bpmn_process_dict=serializer.workflow_to_dict( spiff_workflow ), - process_instance, - bpmn_definition_identifiers_to_bpmn_process_id_mappings=bpmn_definition_identifiers_to_bpmn_process_id_mappings, + process_instance=process_instance, + bpmn_definition_to_task_definitions_mappings=bpmn_definition_to_task_definitions_mappings, spiff_workflow=spiff_workflow, ) ) @@ -152,11 +151,11 @@ class TaskService: spiff_workflow = spiff_task.workflow bpmn_process, new_task_models, new_json_data_dicts = ( cls.add_bpmn_process( - serializer.workflow_to_dict(subprocess), - process_instance, - process_instance.bpmn_process, - subprocess_guid, - bpmn_definition_identifiers_to_bpmn_process_id_mappings=bpmn_definition_identifiers_to_bpmn_process_id_mappings, + bpmn_process_dict=serializer.workflow_to_dict(subprocess), + process_instance=process_instance, + bpmn_process_parent=process_instance.bpmn_process, + bpmn_process_guid=subprocess_guid, + bpmn_definition_to_task_definitions_mappings=bpmn_definition_to_task_definitions_mappings, spiff_workflow=spiff_workflow, ) @@ -168,10 +167,10 @@ class TaskService: cls, bpmn_process_dict: dict, process_instance: ProcessInstanceModel, + bpmn_definition_to_task_definitions_mappings: dict, + spiff_workflow: BpmnWorkflow, bpmn_process_parent: Optional[BpmnProcessModel] = None, bpmn_process_guid: Optional[str] = None, - 
bpmn_definition_identifiers_to_bpmn_process_id_mappings: Optional[dict] = None, - spiff_workflow: Optional[BpmnWorkflow] = None, ) -> Tuple[BpmnProcessModel, dict[str, TaskModel], dict[str, JsonDataDict]]: """This creates and adds a bpmn_process to the Db session. @@ -185,7 +184,6 @@ class TaskService: new_json_data_dicts: dict[str, JsonDataDict] = {} bpmn_process = None - print("ADD BPMN PROCESS") if bpmn_process_parent is not None: bpmn_process = BpmnProcessModel.query.filter_by( parent_process_id=bpmn_process_parent.id, guid=bpmn_process_guid @@ -208,7 +206,6 @@ class TaskService: bpmn_process_data_json.encode("utf8") ).hexdigest() if bpmn_process.json_data_hash != bpmn_process_data_hash: - # print(f"bpmn_process_data_dict: {bpmn_process_data_dict}") new_json_data_dicts[bpmn_process_data_hash] = { "hash": bpmn_process_data_hash, "data": bpmn_process_data_dict, @@ -232,32 +229,15 @@ class TaskService: task_properties['parent'] = None process_dict = bpmn_process.properties_json process_dict['root'] = task_id - # print(f"process_dict: {process_dict}") bpmn_process.properties_json = process_dict - # print(f"bpmn_process.properties_json: {bpmn_process.properties_json}") db.session.add(bpmn_process) task_data_dict = task_properties.pop("data") state_int = task_properties["state"] task_model = TaskModel.query.filter_by(guid=task_id).first() if task_model is None: - # bpmn_process_identifier = task_properties['workflow_name'] - # bpmn_identifier = task_properties['task_spec'] - # - # task_definition = TaskDefinitionModel.query.filter_by(bpmn_identifier=bpmn_identifier) - # .join(BpmnProcessDefinitionModel).filter(BpmnProcessDefinitionModel.bpmn_identifier==bpmn_process_identifier).first() - # if task_definition is None: - # subprocess_task = TaskModel.query.filter_by(guid=bpmn_process.guid) spiff_task = spiff_workflow.get_task(UUID(task_id)) - try: - task_definition = bpmn_definition_identifiers_to_bpmn_process_id_mappings[spiff_task.workflow.spec.name][spiff_task.task_spec.name] - except Exception as ex: - import pdb; pdb.set_trace() - print("HEY") - raise ex - task_model = TaskModel( - guid=task_id, bpmn_process_id=bpmn_process.id, process_instance_id=process_instance.id, task_definition_id=task_definition.id - ) + task_model = cls._create_task(bpmn_process, process_instance, spiff_task, bpmn_definition_to_task_definitions_mappings) task_model.state = TaskStateNames[state_int] task_model.properties_json = task_properties @@ -281,3 +261,12 @@ class TaskService: json_data_dict = {"hash": task_data_hash, "data": task_data_dict} task_model.json_data_hash = task_data_hash return json_data_dict + + @classmethod + def _create_task(cls, bpmn_process: BpmnProcessModel, process_instance: ProcessInstanceModel, spiff_task: SpiffTask, bpmn_definition_to_task_definitions_mappings: dict) -> TaskModel: + + task_definition = bpmn_definition_to_task_definitions_mappings[spiff_task.workflow.spec.name][spiff_task.task_spec.name] + task_model = TaskModel( + guid=str(spiff_task.id), bpmn_process_id=bpmn_process.id, process_instance_id=process_instance.id, task_definition_id=task_definition.id + ) + return task_model diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py index 9a4a6c7d..c1db70d9 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py +++ 
b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py @@ -57,12 +57,12 @@ class TaskModelSavingDelegate(EngineStepDelegate): self, serializer: BpmnWorkflowSerializer, process_instance: ProcessInstanceModel, + bpmn_definition_to_task_definitions_mappings: dict, secondary_engine_step_delegate: Optional[EngineStepDelegate] = None, - bpmn_definition_identifiers_to_bpmn_process_id_mappings: Optional[dict] = None, ) -> None: self.secondary_engine_step_delegate = secondary_engine_step_delegate self.process_instance = process_instance - self.bpmn_definition_identifiers_to_bpmn_process_id_mappings = bpmn_definition_identifiers_to_bpmn_process_id_mappings + self.bpmn_definition_to_task_definitions_mappings = bpmn_definition_to_task_definitions_mappings self.current_task_model: Optional[TaskModel] = None self.task_models: dict[str, TaskModel] = {} @@ -80,7 +80,7 @@ class TaskModelSavingDelegate(EngineStepDelegate): if self.should_update_task_model(): _bpmn_process, task_model, new_task_models, new_json_data_dicts = ( TaskService.find_or_create_task_model_from_spiff_task( - spiff_task, self.process_instance, self.serializer, bpmn_definition_identifiers_to_bpmn_process_id_mappings=self.bpmn_definition_identifiers_to_bpmn_process_id_mappings + spiff_task, self.process_instance, self.serializer, bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings ) ) self.current_task_model = task_model @@ -94,7 +94,7 @@ class TaskModelSavingDelegate(EngineStepDelegate): if self.current_task_model and self.should_update_task_model(): self.current_task_model.end_in_seconds = time.time() json_data_dict = TaskService.update_task_model( - self.current_task_model, spiff_task, self.serializer, bpmn_definition_identifiers_to_bpmn_process_id_mappings=self.bpmn_definition_identifiers_to_bpmn_process_id_mappings + self.current_task_model, spiff_task, self.serializer ) if json_data_dict is not None: self.json_data_dicts[json_data_dict["hash"]] = json_data_dict @@ -123,13 +123,13 @@ class TaskModelSavingDelegate(EngineStepDelegate): ): _bpmn_process, task_model, new_task_models, new_json_data_dicts = ( TaskService.find_or_create_task_model_from_spiff_task( - waiting_spiff_task, self.process_instance, self.serializer, bpmn_definition_identifiers_to_bpmn_process_id_mappings=self.bpmn_definition_identifiers_to_bpmn_process_id_mappings + waiting_spiff_task, self.process_instance, self.serializer, bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings ) ) self.task_models.update(new_task_models) self.json_data_dicts.update(new_json_data_dicts) json_data_dict = TaskService.update_task_model( - task_model, waiting_spiff_task, self.serializer, bpmn_definition_identifiers_to_bpmn_process_id_mappings=self.bpmn_definition_identifiers_to_bpmn_process_id_mappings + task_model, waiting_spiff_task, self.serializer ) self.task_models[task_model.guid] = task_model if json_data_dict is not None: From ae8a4de17566dc17d6935be2ebdb5768494e1726 Mon Sep 17 00:00:00 2001 From: jasquat Date: Wed, 15 Mar 2023 12:15:48 -0400 Subject: [PATCH 016/162] pyl and cleaned up debug code w/ burnettk --- spiffworkflow-backend/migrations/env.py | 2 - .../models/bpmn_process.py | 2 +- .../src/spiffworkflow_backend/models/task.py | 5 +- .../services/process_instance_processor.py | 136 +++++++++++------- .../services/task_service.py | 67 ++++++--- .../services/workflow_execution_service.py | 14 +- .../unit/test_error_handling_service.py | 1 - 
.../unit/test_process_instance_processor.py | 27 ++-- 8 files changed, 158 insertions(+), 96 deletions(-) diff --git a/spiffworkflow-backend/migrations/env.py b/spiffworkflow-backend/migrations/env.py index 68feded2..630e381a 100644 --- a/spiffworkflow-backend/migrations/env.py +++ b/spiffworkflow-backend/migrations/env.py @@ -1,5 +1,3 @@ -from __future__ import with_statement - import logging from logging.config import fileConfig diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py index faae68c1..f7e301e4 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py @@ -1,7 +1,7 @@ from __future__ import annotations -from sqlalchemy.orm import relationship from sqlalchemy import ForeignKey +from sqlalchemy.orm import relationship from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py index 14746d6f..99ccb61b 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py @@ -1,8 +1,5 @@ """Task.""" import enum - -from sqlalchemy.orm import relationship -from spiffworkflow_backend.models.task_definition import TaskDefinitionModel from dataclasses import dataclass from typing import Any from typing import Optional @@ -13,10 +10,12 @@ from marshmallow import Schema from marshmallow_enum import EnumField # type: ignore from SpiffWorkflow.task import TaskStateNames # type: ignore from sqlalchemy import ForeignKey +from sqlalchemy.orm import relationship from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel +from spiffworkflow_backend.models.task_definition import TaskDefinitionModel class MultiInstanceType(enum.Enum): diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 17180137..6b80fbcf 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -53,7 +53,6 @@ from SpiffWorkflow.task import TaskState from SpiffWorkflow.util.deep_merge import DeepMerge # type: ignore from spiffworkflow_backend.exceptions.api_error import ApiError -from spiffworkflow_backend.models import task_definition from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel from spiffworkflow_backend.models.bpmn_process_definition import ( BpmnProcessDefinitionModel, @@ -457,7 +456,7 @@ class ProcessInstanceProcessor: self.process_instance_model = process_instance_model self.process_model_service = ProcessModelService() bpmn_process_spec = None - self.full_bpmn_process_dict = {} + self.full_bpmn_process_dict: dict = {} # this caches the bpmn_process_definition_identifier and task_identifier back to the bpmn_process_id # in the database. This is to cut down on database queries while adding new tasks to the database. 
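# A minimal sketch of the two-level mapping described in the comment above;
# "top_level_process" and "top_level_script" are example identifiers borrowed
# from the tests in this series, and object() stands in for a TaskDefinitionModel row.
def update_mappings(
    mappings: dict, process_identifier: str, task_identifier: str, task_definition: object
) -> None:
    # first level keys on the bpmn process definition identifier, second level on the task identifier
    if process_identifier not in mappings:
        mappings[process_identifier] = {}
    mappings[process_identifier][task_identifier] = task_definition

mappings: dict = {}
update_mappings(mappings, "top_level_process", "top_level_script", object())
# a lookup from a spiff_task then avoids a TaskDefinitionModel query:
# mappings[spiff_task.workflow.spec.name][spiff_task.task_spec.name]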
@@ -465,7 +464,7 @@ class ProcessInstanceProcessor: # { "bpmn_process_definition_identifier": { "task_identifier": task_definition } } # To use from a spiff_task: # [spiff_task.workflow.spec.name][spiff_task.task_spec.name] - self.bpmn_definition_to_task_definitions_mappings = {} + self.bpmn_definition_to_task_definitions_mappings: dict = {} subprocesses: Optional[IdToBpmnProcessSpecMapping] = None if process_instance_model.bpmn_process_definition_id is None: @@ -482,13 +481,15 @@ class ProcessInstanceProcessor: ) try: - (self.bpmn_process_instance, self.full_bpmn_process_dict, self.bpmn_definition_to_task_definitions_mappings) = ( - self.__get_bpmn_process_instance( - process_instance_model, - bpmn_process_spec, - validate_only, - subprocesses=subprocesses, - ) + ( + self.bpmn_process_instance, + self.full_bpmn_process_dict, + self.bpmn_definition_to_task_definitions_mappings, + ) = self.__get_bpmn_process_instance( + process_instance_model, + bpmn_process_spec, + validate_only, + subprocesses=subprocesses, ) self.set_script_engine(self.bpmn_process_instance) @@ -549,18 +550,27 @@ class ProcessInstanceProcessor: @classmethod def _update_bpmn_definition_mappings( - cls, bpmn_definition_to_task_definitions_mappings: dict, bpmn_process_definition_identifier: str, task_definition: TaskDefinitionModel + cls, + bpmn_definition_to_task_definitions_mappings: dict, + bpmn_process_definition_identifier: str, + task_definition: TaskDefinitionModel, ) -> None: - # import pdb; pdb.set_trace() - # if bpmn_process_definition_identifier == 'test_process_to_call' and task_definition.bpmn_identifier == "Root": - # import pdb; pdb.set_trace() - if bpmn_process_definition_identifier not in bpmn_definition_to_task_definitions_mappings: - bpmn_definition_to_task_definitions_mappings[bpmn_process_definition_identifier] = {} - bpmn_definition_to_task_definitions_mappings[bpmn_process_definition_identifier][task_definition.bpmn_identifier] = task_definition + if ( + bpmn_process_definition_identifier + not in bpmn_definition_to_task_definitions_mappings + ): + bpmn_definition_to_task_definitions_mappings[ + bpmn_process_definition_identifier + ] = {} + bpmn_definition_to_task_definitions_mappings[ + bpmn_process_definition_identifier + ][task_definition.bpmn_identifier] = task_definition @classmethod def _get_definition_dict_for_bpmn_process_definition( - cls, bpmn_process_definition: BpmnProcessDefinitionModel, bpmn_definition_to_task_definitions_mappings: dict + cls, + bpmn_process_definition: BpmnProcessDefinitionModel, + bpmn_definition_to_task_definitions_mappings: dict, ) -> dict: task_definitions = TaskDefinitionModel.query.filter_by( bpmn_process_definition_id=bpmn_process_definition.id @@ -571,7 +581,11 @@ class ProcessInstanceProcessor: bpmn_process_definition_dict["task_specs"][ task_definition.bpmn_identifier ] = task_definition.properties_json - cls._update_bpmn_definition_mappings(bpmn_definition_to_task_definitions_mappings, bpmn_process_definition.bpmn_identifier, task_definition) + cls._update_bpmn_definition_mappings( + bpmn_definition_to_task_definitions_mappings, + bpmn_process_definition.bpmn_identifier, + task_definition, + ) return bpmn_process_definition_dict @classmethod @@ -618,7 +632,11 @@ class ProcessInstanceProcessor: task_definition.bpmn_process_definition_id ] ) - cls._update_bpmn_definition_mappings(bpmn_definition_to_task_definitions_mappings, bpmn_subprocess_definition_bpmn_identifier, task_definition) + cls._update_bpmn_definition_mappings( + 
bpmn_definition_to_task_definitions_mappings, + bpmn_subprocess_definition_bpmn_identifier, + task_definition, + ) spiff_bpmn_process_dict["subprocess_specs"][ bpmn_subprocess_definition_bpmn_identifier ]["task_specs"][ @@ -667,7 +685,9 @@ class ProcessInstanceProcessor: @classmethod def _get_full_bpmn_process_dict( - cls, process_instance_model: ProcessInstanceModel, bpmn_definition_to_task_definitions_mappings: dict + cls, + process_instance_model: ProcessInstanceModel, + bpmn_definition_to_task_definitions_mappings: dict, ) -> dict: if process_instance_model.bpmn_process_definition_id is None: return {} @@ -682,11 +702,14 @@ class ProcessInstanceProcessor: if bpmn_process_definition is not None: spiff_bpmn_process_dict["spec"] = ( cls._get_definition_dict_for_bpmn_process_definition( - bpmn_process_definition, bpmn_definition_to_task_definitions_mappings + bpmn_process_definition, + bpmn_definition_to_task_definitions_mappings, ) ) cls._set_definition_dict_for_bpmn_subprocess_definitions( - bpmn_process_definition, spiff_bpmn_process_dict, bpmn_definition_to_task_definitions_mappings + bpmn_process_definition, + spiff_bpmn_process_dict, + bpmn_definition_to_task_definitions_mappings, ) bpmn_process = process_instance_model.bpmn_process @@ -755,8 +778,7 @@ class ProcessInstanceProcessor: subprocesses: Optional[IdToBpmnProcessSpecMapping] = None, ) -> Tuple[BpmnWorkflow, dict, dict]: full_bpmn_process_dict = {} - bpmn_definition_to_task_definitions_mappings = {} - # print("GET BPMN PROCESS INSTANCE") + bpmn_definition_to_task_definitions_mappings: dict = {} if process_instance_model.bpmn_process_definition_id is not None: # turn off logging to avoid duplicated spiff logs spiff_logger = logging.getLogger("spiff") @@ -766,10 +788,10 @@ class ProcessInstanceProcessor: try: full_bpmn_process_dict = ( ProcessInstanceProcessor._get_full_bpmn_process_dict( - process_instance_model, bpmn_definition_to_task_definitions_mappings + process_instance_model, + bpmn_definition_to_task_definitions_mappings, ) ) - # print("WE GOT FULL BPMN PROCESS DICT") bpmn_process_instance = ( ProcessInstanceProcessor._serializer.workflow_from_dict( full_bpmn_process_dict @@ -782,17 +804,19 @@ class ProcessInstanceProcessor: ProcessInstanceProcessor.set_script_engine(bpmn_process_instance) else: - # print("WE NO HAVE FULL BPMN YET") bpmn_process_instance = ( ProcessInstanceProcessor.get_bpmn_process_instance_from_workflow_spec( spec, subprocesses ) ) - # import pdb; pdb.set_trace() bpmn_process_instance.data[ ProcessInstanceProcessor.VALIDATION_PROCESS_KEY ] = validate_only - return (bpmn_process_instance, full_bpmn_process_dict, bpmn_definition_to_task_definitions_mappings) + return ( + bpmn_process_instance, + full_bpmn_process_dict, + bpmn_definition_to_task_definitions_mappings, + ) def slam_in_data(self, data: dict) -> None: """Slam_in_data.""" @@ -1063,13 +1087,7 @@ class ProcessInstanceProcessor: bpmn_process_definition: Optional[BpmnProcessDefinitionModel] = ( BpmnProcessDefinitionModel.query.filter_by(hash=new_hash_digest).first() ) - # print(f"process_bpmn_properties: {process_bpmn_properties}") - # import pdb; pdb.set_trace() - # if process_bpmn_identifier == "test_process_to_call": - # import pdb; pdb.set_trace() - # # print("HEY22") - # print(f"self.process_instance_model.id: {self.process_instance_model.id}") if bpmn_process_definition is None: task_specs = process_bpmn_properties.pop("task_specs") bpmn_process_definition = BpmnProcessDefinitionModel( @@ -1088,13 +1106,23 @@ class 
ProcessInstanceProcessor: ) db.session.add(task_definition) if store_bpmn_definition_mappings: - self._update_bpmn_definition_mappings(self.bpmn_definition_to_task_definitions_mappings, process_bpmn_identifier, task_definition) + self._update_bpmn_definition_mappings( + self.bpmn_definition_to_task_definitions_mappings, + process_bpmn_identifier, + task_definition, + ) elif store_bpmn_definition_mappings: # this should only ever happen when new process instances use a pre-existing bpmn process definitions # otherwise this should get populated on processor initialization - task_definitions = TaskDefinitionModel.query.filter_by(bpmn_process_definition_id=bpmn_process_definition.id).all() + task_definitions = TaskDefinitionModel.query.filter_by( + bpmn_process_definition_id=bpmn_process_definition.id + ).all() for task_definition in task_definitions: - self._update_bpmn_definition_mappings(self.bpmn_definition_to_task_definitions_mappings, process_bpmn_identifier, task_definition) + self._update_bpmn_definition_mappings( + self.bpmn_definition_to_task_definitions_mappings, + process_bpmn_identifier, + task_definition, + ) if bpmn_process_definition_parent is not None: bpmn_process_definition_relationship = ( @@ -1113,16 +1141,19 @@ class ProcessInstanceProcessor: def _add_bpmn_process_definitions(self, bpmn_spec_dict: dict) -> None: # store only if mappings is currently empty. this also would mean this is a new instance that has never saved before - # print("WE STORE BPM PROCESS DEF") - store_bpmn_definition_mappings = not self.bpmn_definition_to_task_definitions_mappings + store_bpmn_definition_mappings = ( + not self.bpmn_definition_to_task_definitions_mappings + ) bpmn_process_definition_parent = self._store_bpmn_process_definition( - bpmn_spec_dict["spec"], store_bpmn_definition_mappings=store_bpmn_definition_mappings + bpmn_spec_dict["spec"], + store_bpmn_definition_mappings=store_bpmn_definition_mappings, ) for process_bpmn_properties in bpmn_spec_dict["subprocess_specs"].values(): self._store_bpmn_process_definition( - process_bpmn_properties, bpmn_process_definition_parent, store_bpmn_definition_mappings=store_bpmn_definition_mappings + process_bpmn_properties, + bpmn_process_definition_parent, + store_bpmn_definition_mappings=store_bpmn_definition_mappings, ) - # import pdb; pdb.set_trace() self.process_instance_model.bpmn_process_definition = ( bpmn_process_definition_parent ) @@ -1132,7 +1163,6 @@ class ProcessInstanceProcessor: Expects the save method to commit it. 
""" - # print("WE SAVE THINGS") bpmn_dict = self.serialize() bpmn_dict_keys = ("spec", "subprocess_specs", "serializer_version") process_instance_data_dict = {} @@ -1143,12 +1173,9 @@ class ProcessInstanceProcessor: else: process_instance_data_dict[bpmn_key] = bpmn_dict[bpmn_key] - # FIXME: always save new hash until we get updated Spiff without loopresettask - # if self.process_instance_model.bpmn_process_definition_id is None: - self._add_bpmn_process_definitions(bpmn_spec_dict) - # import pdb; pdb.set_trace() - # print("WE NOW STORE BPMN PROCESS STUFFS") - # print(f"bpmn_definition_to_task_definitions_mappings: {self.bpmn_definition_to_task_definitions_mappings}") + # we may have to already process bpmn_defintions if we ever care about the Root task again + if self.process_instance_model.bpmn_process_definition_id is None: + self._add_bpmn_process_definitions(bpmn_spec_dict) subprocesses = process_instance_data_dict.pop("subprocesses") bpmn_process_parent, new_task_models, new_json_data_dicts = ( @@ -1156,7 +1183,7 @@ class ProcessInstanceProcessor: bpmn_process_dict=process_instance_data_dict, process_instance=self.process_instance_model, bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, - spiff_workflow=self.bpmn_process_instance + spiff_workflow=self.bpmn_process_instance, ) ) for subprocess_task_id, subprocess_properties in subprocesses.items(): @@ -1170,7 +1197,7 @@ class ProcessInstanceProcessor: bpmn_process_parent=bpmn_process_parent, bpmn_process_guid=subprocess_task_id, bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, - spiff_workflow=self.bpmn_process_instance + spiff_workflow=self.bpmn_process_instance, ) new_task_models.update(subprocess_new_task_models) new_json_data_dicts.update(subprocess_new_json_data_models) @@ -1180,7 +1207,6 @@ class ProcessInstanceProcessor: def save(self) -> None: """Saves the current state of this processor to the database.""" - # print("WE IN SAVE") self._add_bpmn_json_records() self.process_instance_model.spiff_serializer_version = self.SERIALIZER_VERSION @@ -1308,7 +1334,7 @@ class ProcessInstanceProcessor: try: self.bpmn_process_instance.catch(event_definition) except Exception as e: - print(e) + print(e) # TODO: do_engine_steps without a lock self.do_engine_steps(save=True) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index 76a957b2..1d81bc59 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -3,6 +3,7 @@ from hashlib import sha256 from typing import Optional from typing import Tuple from typing import TypedDict +from uuid import UUID from flask import current_app from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflow # type: ignore @@ -11,7 +12,6 @@ from SpiffWorkflow.task import Task as SpiffTask # type: ignore from SpiffWorkflow.task import TaskStateNames from sqlalchemy.dialects.mysql import insert as mysql_insert from sqlalchemy.dialects.postgresql import insert as postgres_insert -from uuid import UUID from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel from spiffworkflow_backend.models.db import db @@ -88,13 +88,21 @@ class TaskService: new_json_data_dicts: dict[str, JsonDataDict] = {} if task_model is None: bpmn_process, new_task_models, new_json_data_dicts = cls.task_bpmn_process( - 
spiff_task, process_instance, serializer, bpmn_definition_to_task_definitions_mappings=bpmn_definition_to_task_definitions_mappings + spiff_task, + process_instance, + serializer, + bpmn_definition_to_task_definitions_mappings=bpmn_definition_to_task_definitions_mappings, ) task_model = TaskModel.query.filter_by(guid=spiff_task_guid).first() if task_model is None: - task_definition = bpmn_definition_to_task_definitions_mappings[spiff_task.workflow.spec.name][spiff_task.task_spec.name] + task_definition = bpmn_definition_to_task_definitions_mappings[ + spiff_task.workflow.spec.name + ][spiff_task.task_spec.name] task_model = TaskModel( - guid=spiff_task_guid, bpmn_process_id=bpmn_process.id, process_instance_id=process_instance.id, task_definition_id=task_definition.id + guid=spiff_task_guid, + bpmn_process_id=bpmn_process.id, + process_instance_id=process_instance.id, + task_definition_id=task_definition.id, ) return (bpmn_process, task_model, new_task_models, new_json_data_dicts) @@ -135,9 +143,7 @@ class TaskService: spiff_workflow = spiff_task.workflow._get_outermost_workflow() bpmn_process, new_task_models, new_json_data_dicts = ( cls.add_bpmn_process( - bpmn_process_dict=serializer.workflow_to_dict( - spiff_workflow - ), + bpmn_process_dict=serializer.workflow_to_dict(spiff_workflow), process_instance=process_instance, bpmn_definition_to_task_definitions_mappings=bpmn_definition_to_task_definitions_mappings, spiff_workflow=spiff_workflow, @@ -157,7 +163,6 @@ class TaskService: bpmn_process_guid=subprocess_guid, bpmn_definition_to_task_definitions_mappings=bpmn_definition_to_task_definitions_mappings, spiff_workflow=spiff_workflow, - ) ) return (bpmn_process, new_task_models, new_json_data_dicts) @@ -195,9 +200,12 @@ class TaskService: if bpmn_process is None: bpmn_process_is_new = True bpmn_process = BpmnProcessModel(guid=bpmn_process_guid) + + # Point the root id to the Start task instead of the Root task + # since we are ignoring the Root task. for task_id, task_properties in tasks.items(): - if task_properties['task_spec'] == 'Start': - bpmn_process_dict['root'] = task_id + if task_properties["task_spec"] == "Start": + bpmn_process_dict["root"] = task_id bpmn_process.properties_json = bpmn_process_dict @@ -223,21 +231,26 @@ class TaskService: if bpmn_process_is_new: for task_id, task_properties in tasks.items(): - if task_properties['task_spec'] == 'Root': + # The Root task is added to the spec by Spiff when the bpmn process is instantiated + # within Spiff. We do not actually need it and it's missing from our initial + # bpmn process definition so let's avoid using it. 
+ if task_properties["task_spec"] == "Root": continue - if task_properties['task_spec'] == 'Start': - task_properties['parent'] = None - process_dict = bpmn_process.properties_json - process_dict['root'] = task_id - bpmn_process.properties_json = process_dict - db.session.add(bpmn_process) + if task_properties["task_spec"] == "Start": + task_properties["parent"] = None + task_data_dict = task_properties.pop("data") state_int = task_properties["state"] task_model = TaskModel.query.filter_by(guid=task_id).first() if task_model is None: spiff_task = spiff_workflow.get_task(UUID(task_id)) - task_model = cls._create_task(bpmn_process, process_instance, spiff_task, bpmn_definition_to_task_definitions_mappings) + task_model = cls._create_task( + bpmn_process, + process_instance, + spiff_task, + bpmn_definition_to_task_definitions_mappings, + ) task_model.state = TaskStateNames[state_int] task_model.properties_json = task_properties @@ -263,10 +276,20 @@ class TaskService: return json_data_dict @classmethod - def _create_task(cls, bpmn_process: BpmnProcessModel, process_instance: ProcessInstanceModel, spiff_task: SpiffTask, bpmn_definition_to_task_definitions_mappings: dict) -> TaskModel: - - task_definition = bpmn_definition_to_task_definitions_mappings[spiff_task.workflow.spec.name][spiff_task.task_spec.name] + def _create_task( + cls, + bpmn_process: BpmnProcessModel, + process_instance: ProcessInstanceModel, + spiff_task: SpiffTask, + bpmn_definition_to_task_definitions_mappings: dict, + ) -> TaskModel: + task_definition = bpmn_definition_to_task_definitions_mappings[ + spiff_task.workflow.spec.name + ][spiff_task.task_spec.name] task_model = TaskModel( - guid=str(spiff_task.id), bpmn_process_id=bpmn_process.id, process_instance_id=process_instance.id, task_definition_id=task_definition.id + guid=str(spiff_task.id), + bpmn_process_id=bpmn_process.id, + process_instance_id=process_instance.id, + task_definition_id=task_definition.id, ) return task_model diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py index c1db70d9..be13342a 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py @@ -62,7 +62,9 @@ class TaskModelSavingDelegate(EngineStepDelegate): ) -> None: self.secondary_engine_step_delegate = secondary_engine_step_delegate self.process_instance = process_instance - self.bpmn_definition_to_task_definitions_mappings = bpmn_definition_to_task_definitions_mappings + self.bpmn_definition_to_task_definitions_mappings = ( + bpmn_definition_to_task_definitions_mappings + ) self.current_task_model: Optional[TaskModel] = None self.task_models: dict[str, TaskModel] = {} @@ -80,7 +82,10 @@ class TaskModelSavingDelegate(EngineStepDelegate): if self.should_update_task_model(): _bpmn_process, task_model, new_task_models, new_json_data_dicts = ( TaskService.find_or_create_task_model_from_spiff_task( - spiff_task, self.process_instance, self.serializer, bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings + spiff_task, + self.process_instance, + self.serializer, + bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, ) ) self.current_task_model = task_model @@ -123,7 +128,10 @@ class TaskModelSavingDelegate(EngineStepDelegate): ): _bpmn_process, 
task_model, new_task_models, new_json_data_dicts = ( TaskService.find_or_create_task_model_from_spiff_task( - waiting_spiff_task, self.process_instance, self.serializer, bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings + waiting_spiff_task, + self.process_instance, + self.serializer, + bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, ) ) self.task_models.update(new_task_models) diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling_service.py index 4566625a..9d481788 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling_service.py @@ -33,7 +33,6 @@ class TestErrorHandlingService(BaseTest): process_instance = ProcessInstanceService.create_process_instance_from_process_model_identifier( process_model.id, user ) - print(f"process_instance.id: {process_instance.id}") pip = ProcessInstanceProcessor(process_instance) with pytest.raises(ApiError) as e: pip.do_engine_steps(save=True) diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index 6bd7a305..ac1a286e 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -378,24 +378,33 @@ class TestProcessInstanceProcessor(BaseTest): assert len(all_spiff_tasks) > 1 for spiff_task in all_spiff_tasks: assert spiff_task.state == TaskState.COMPLETED - if spiff_task.task_spec.name == 'test_process_to_call_script': + if spiff_task.task_spec.name == "test_process_to_call_script": task = TaskModel.query.filter_by(guid=str(spiff_task.id)).first() assert task.task_definition_id is not None task_definition = task.task_definition - assert task_definition.bpmn_identifier == 'test_process_to_call_script' - assert task_definition.bpmn_process_definition.bpmn_identifier == 'test_process_to_call' - elif spiff_task.task_spec.name == 'top_level_subprocess_script': + assert task_definition.bpmn_identifier == "test_process_to_call_script" + assert ( + task_definition.bpmn_process_definition.bpmn_identifier + == "test_process_to_call" + ) + elif spiff_task.task_spec.name == "top_level_subprocess_script": task = TaskModel.query.filter_by(guid=str(spiff_task.id)).first() assert task.task_definition_id is not None task_definition = task.task_definition - assert task_definition.bpmn_identifier == 'top_level_subprocess_script' - assert task_definition.bpmn_process_definition.bpmn_identifier == 'top_level_subprocess' - if spiff_task.task_spec.name == 'top_level_script': + assert task_definition.bpmn_identifier == "top_level_subprocess_script" + assert ( + task_definition.bpmn_process_definition.bpmn_identifier + == "top_level_subprocess" + ) + if spiff_task.task_spec.name == "top_level_script": task = TaskModel.query.filter_by(guid=str(spiff_task.id)).first() assert task.task_definition_id is not None task_definition = task.task_definition - assert task_definition.bpmn_identifier == 'top_level_script' - assert task_definition.bpmn_process_definition.bpmn_identifier == 'top_level_process' + assert task_definition.bpmn_identifier == "top_level_script" + assert ( + 
task_definition.bpmn_process_definition.bpmn_identifier + == "top_level_process" + ) # FIXME: Checking task data cannot work with the feature/remove-loop-reset branch # of SpiffWorkflow. This is because it saves script data to the python_env and NOT # to task.data. We may need to either create a new column on TaskModel to put the python_env From 738a2e007857590db7c20071bd9dd773f0ba5932 Mon Sep 17 00:00:00 2001 From: Dan Date: Wed, 15 Mar 2023 12:26:47 -0400 Subject: [PATCH 017/162] Columns should not be removed on reset, but any filters applied to those columns should be removed. --- .../src/components/ProcessInstanceListTable.tsx | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx index 9f8d258c..04710605 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -704,11 +704,8 @@ export default function ProcessInstanceListTable({ setEndToTime(''); setProcessInitiatorSelection(null); setProcessInitiatorText(''); - if (reportMetadata) { - reportMetadata.columns = reportMetadata.columns.filter( - (column) => !column.filterable - ); + reportMetadata.filter_by = []; } }; From 8e0324df632cb2cf63ca7f1582ddbdf73b17ade4 Mon Sep 17 00:00:00 2001 From: jbirddog <100367399+jbirddog@users.noreply.github.com> Date: Wed, 15 Mar 2023 12:32:55 -0400 Subject: [PATCH 018/162] Smaller locking window for the background processor (#183) --- .../process_instance_queue_service.py | 37 ++++++++++++++----- .../services/process_instance_service.py | 6 +-- 2 files changed, 31 insertions(+), 12 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_queue_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_queue_service.py index d75d903f..a0aceb94 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_queue_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_queue_service.py @@ -1,5 +1,6 @@ import time from typing import List +from typing import Optional from flask import current_app @@ -84,8 +85,33 @@ class ProcessInstanceQueueService: ProcessInstanceLockService.lock(process_instance.id, queue_entry) - @staticmethod + @classmethod + def entries_with_status( + cls, + status_value: str = ProcessInstanceStatus.waiting.value, + locked_by: Optional[str] = None, + ) -> List[ProcessInstanceQueueModel]: + return ( + db.session.query(ProcessInstanceQueueModel) + .filter( + ProcessInstanceQueueModel.status == status_value, + ProcessInstanceQueueModel.locked_by == locked_by, + ) + .all() + ) + + @classmethod + def peek_many( + cls, + status_value: str = ProcessInstanceStatus.waiting.value, + ) -> List[int]: + queue_entries = cls.entries_with_status(status_value, None) + ids_with_status = [entry.process_instance_id for entry in queue_entries] + return ids_with_status + + @classmethod def dequeue_many( + cls, status_value: str = ProcessInstanceStatus.waiting.value, ) -> List[int]: locked_by = ProcessInstanceLockService.locked_by() @@ -102,14 +128,7 @@ class ProcessInstanceQueueService: db.session.commit() - queue_entries = ( - db.session.query(ProcessInstanceQueueModel) - .filter( - ProcessInstanceQueueModel.status == status_value, - ProcessInstanceQueueModel.locked_by == locked_by, - ) - .all() - ) + queue_entries = 
cls.entries_with_status(status_value, locked_by) locked_ids = ProcessInstanceLockService.lock_many(queue_entries) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py index dfeb2bde..23ce9a22 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py @@ -84,15 +84,15 @@ class ProcessInstanceService: @staticmethod def do_waiting(status_value: str = ProcessInstanceStatus.waiting.value) -> None: """Do_waiting.""" - locked_process_instance_ids = ProcessInstanceQueueService.dequeue_many( + process_instance_ids_to_check = ProcessInstanceQueueService.peek_many( status_value ) - if len(locked_process_instance_ids) == 0: + if len(process_instance_ids_to_check) == 0: return records = ( db.session.query(ProcessInstanceModel) - .filter(ProcessInstanceModel.id.in_(locked_process_instance_ids)) # type: ignore + .filter(ProcessInstanceModel.id.in_(process_instance_ids_to_check)) # type: ignore .all() ) process_instance_lock_prefix = "Background" From b996048418f400deb2c84e877659d431ec2bb968 Mon Sep 17 00:00:00 2001 From: burnettk Date: Wed, 15 Mar 2023 14:14:45 -0400 Subject: [PATCH 019/162] lint --- .../routes/process_instances_controller.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index 3aaa418f..1c9e2758 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -15,6 +15,9 @@ from flask import request from flask.wrappers import Response from SpiffWorkflow.task import Task as SpiffTask # type: ignore from SpiffWorkflow.task import TaskState +from sqlalchemy import and_ +from sqlalchemy import or_ + from spiffworkflow_backend.exceptions.api_error import ApiError from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.human_task import HumanTaskModel @@ -70,8 +73,6 @@ from spiffworkflow_backend.services.process_instance_service import ( ) from spiffworkflow_backend.services.process_model_service import ProcessModelService from spiffworkflow_backend.services.spec_file_service import SpecFileService -from sqlalchemy import and_ -from sqlalchemy import or_ def process_instance_create( From 181e4ef81b8cde15ddcc513ee47f67db69aa8d7c Mon Sep 17 00:00:00 2001 From: jasquat Date: Wed, 15 Mar 2023 15:38:58 -0400 Subject: [PATCH 020/162] check data when running main spiff test w/ burnettk --- spiffworkflow-backend/migrations/env.py | 2 + .../{99f1b5156b06_.py => 434e6494e8ff_.py} | 9 +- .../spiffworkflow_backend/models/json_data.py | 17 ++++ .../src/spiffworkflow_backend/models/task.py | 9 ++ .../services/process_instance_processor.py | 21 ++--- .../services/task_service.py | 35 ++++++-- .../services/workflow_execution_service.py | 18 ++-- .../unit/test_error_handling_service.py | 1 + .../unit/test_process_instance_processor.py | 87 ++++++++++--------- 9 files changed, 128 insertions(+), 71 deletions(-) rename spiffworkflow-backend/migrations/versions/{99f1b5156b06_.py => 434e6494e8ff_.py} (98%) diff --git a/spiffworkflow-backend/migrations/env.py b/spiffworkflow-backend/migrations/env.py index 
630e381a..68feded2 100644 --- a/spiffworkflow-backend/migrations/env.py +++ b/spiffworkflow-backend/migrations/env.py @@ -1,3 +1,5 @@ +from __future__ import with_statement + import logging from logging.config import fileConfig diff --git a/spiffworkflow-backend/migrations/versions/99f1b5156b06_.py b/spiffworkflow-backend/migrations/versions/434e6494e8ff_.py similarity index 98% rename from spiffworkflow-backend/migrations/versions/99f1b5156b06_.py rename to spiffworkflow-backend/migrations/versions/434e6494e8ff_.py index 9407aeaf..3663be8a 100644 --- a/spiffworkflow-backend/migrations/versions/99f1b5156b06_.py +++ b/spiffworkflow-backend/migrations/versions/434e6494e8ff_.py @@ -1,8 +1,8 @@ """empty message -Revision ID: 99f1b5156b06 +Revision ID: 434e6494e8ff Revises: -Create Date: 2023-03-14 17:23:22.667853 +Create Date: 2023-03-15 12:25:48.665481 """ from alembic import op @@ -10,7 +10,7 @@ import sqlalchemy as sa from sqlalchemy.dialects import mysql # revision identifiers, used by Alembic. -revision = '99f1b5156b06' +revision = '434e6494e8ff' down_revision = None branch_labels = None depends_on = None @@ -356,6 +356,7 @@ def upgrade(): sa.Column('state', sa.String(length=10), nullable=False), sa.Column('properties_json', sa.JSON(), nullable=False), sa.Column('json_data_hash', sa.String(length=255), nullable=False), + sa.Column('python_env_data_hash', sa.String(length=255), nullable=False), sa.Column('start_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True), sa.Column('end_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True), sa.ForeignKeyConstraint(['bpmn_process_id'], ['bpmn_process.id'], ), @@ -365,6 +366,7 @@ def upgrade(): ) op.create_index(op.f('ix_task_guid'), 'task', ['guid'], unique=True) op.create_index(op.f('ix_task_json_data_hash'), 'task', ['json_data_hash'], unique=False) + op.create_index(op.f('ix_task_python_env_data_hash'), 'task', ['python_env_data_hash'], unique=False) op.create_table('human_task_user', sa.Column('id', sa.Integer(), nullable=False), sa.Column('human_task_id', sa.Integer(), nullable=False), @@ -398,6 +400,7 @@ def downgrade(): op.drop_index(op.f('ix_human_task_user_user_id'), table_name='human_task_user') op.drop_index(op.f('ix_human_task_user_human_task_id'), table_name='human_task_user') op.drop_table('human_task_user') + op.drop_index(op.f('ix_task_python_env_data_hash'), table_name='task') op.drop_index(op.f('ix_task_json_data_hash'), table_name='task') op.drop_index(op.f('ix_task_guid'), table_name='task') op.drop_table('task') diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/json_data.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/json_data.py index 0723a50a..0713f527 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/json_data.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/json_data.py @@ -1,9 +1,14 @@ from __future__ import annotations +from typing import Optional from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel +class JsonDataModelNotFoundError(Exception): + pass + + # delta algorithm <- just to save it for when we want to try to implement it: # a = {"hey": { "hey2": 2, "hey3": 3, "hey6": 7 }, "hey30": 3, "hey40": 4} # b = {"hey": { "hey2": 4, "hey5": 3 }, "hey20": 2, "hey30": 3} @@ -27,3 +32,15 @@ class JsonDataModel(SpiffworkflowBaseDBModel): # this is a sha256 hash of spec and serializer_version hash: str = db.Column(db.String(255), nullable=False, index=True, unique=True) data: dict = 
db.Column(db.JSON, nullable=False) + + @classmethod + def find_object_by_hash(cls, hash: str) -> JsonDataModel: + json_data_model: Optional[JsonDataModel] = JsonDataModel.query.filter_by(hash=hash).first() + if json_data_model is None: + raise JsonDataModelNotFoundError(f"Could not find a json data model entry with hash: {hash}") + return json_data_model + + + @classmethod + def find_data_dict_by_hash(cls, hash: str) -> dict: + return cls.find_object_by_hash(hash).data diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py index 99ccb61b..fc0d3262 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py @@ -15,6 +15,7 @@ from sqlalchemy.orm import relationship from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel +from spiffworkflow_backend.models.json_data import JsonDataModel from spiffworkflow_backend.models.task_definition import TaskDefinitionModel @@ -59,11 +60,19 @@ class TaskModel(SpiffworkflowBaseDBModel): state: str = db.Column(db.String(10), nullable=False) properties_json: dict = db.Column(db.JSON, nullable=False) + json_data_hash: str = db.Column(db.String(255), nullable=False, index=True) + python_env_data_hash: str = db.Column(db.String(255), nullable=False, index=True) start_in_seconds: float = db.Column(db.DECIMAL(17, 6)) end_in_seconds: Union[float, None] = db.Column(db.DECIMAL(17, 6)) + def python_env_data(self) -> dict: + return JsonDataModel.find_data_dict_by_hash(self.python_env_data_hash) + + def json_data(self) -> dict: + return JsonDataModel.find_data_dict_by_hash(self.json_data_hash) + class Task: """Task.""" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 6b80fbcf..806d8716 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -1957,18 +1957,19 @@ class ProcessInstanceProcessor: db.session.add(details_model) # ####### - json_data_dict = TaskService.update_task_model( + json_data_dict_list = TaskService.update_task_model( task_model, spiff_task, self._serializer ) - if json_data_dict is not None: - json_data = ( - db.session.query(JsonDataModel.id) - .filter_by(hash=json_data_dict["hash"]) - .first() - ) - if json_data is None: - json_data = JsonDataModel(**json_data_dict) - db.session.add(json_data) + for json_data_dict in json_data_dict_list: + if json_data_dict is not None: + json_data = ( + db.session.query(JsonDataModel.id) + .filter_by(hash=json_data_dict["hash"]) + .first() + ) + if json_data is None: + json_data = JsonDataModel(**json_data_dict) + db.session.add(json_data) # this is the thing that actually commits the db transaction (on behalf of the other updates above as well) self.save() diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index 1d81bc59..5b2c7935 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -26,6 +26,8 @@ class 
JsonDataDict(TypedDict): class TaskService: + PYTHON_ENVIRONMENT_STATE_KEY = "spiff__python_env_state" + @classmethod def insert_or_update_json_data_records( cls, json_data_hash_to_json_data_dict_mapping: dict[str, JsonDataDict] @@ -51,7 +53,7 @@ class TaskService: task_model: TaskModel, spiff_task: SpiffTask, serializer: BpmnWorkflowSerializer, - ) -> Optional[JsonDataDict]: + ) -> list[Optional[JsonDataDict]]: """Updates properties_json and data on given task_model. This will NOT update start_in_seconds or end_in_seconds. @@ -59,12 +61,16 @@ class TaskService: """ new_properties_json = serializer.task_to_dict(spiff_task) spiff_task_data = new_properties_json.pop("data") + python_env_data_dict = cls._get_python_env_data_dict_from_spiff_task(spiff_task) task_model.properties_json = new_properties_json task_model.state = TaskStateNames[new_properties_json["state"]] json_data_dict = cls._update_task_data_on_task_model( - task_model, spiff_task_data + task_model, spiff_task_data, "json_data_hash" ) - return json_data_dict + python_env_dict = cls._update_task_data_on_task_model( + task_model, python_env_data_dict, "python_env_data_hash" + ) + return [json_data_dict, python_env_dict] @classmethod def find_or_create_task_model_from_spiff_task( @@ -241,10 +247,10 @@ class TaskService: task_data_dict = task_properties.pop("data") state_int = task_properties["state"] + spiff_task = spiff_workflow.get_task(UUID(task_id)) task_model = TaskModel.query.filter_by(guid=task_id).first() if task_model is None: - spiff_task = spiff_workflow.get_task(UUID(task_id)) task_model = cls._create_task( bpmn_process, process_instance, @@ -253,26 +259,33 @@ class TaskService: ) task_model.state = TaskStateNames[state_int] task_model.properties_json = task_properties + new_task_models[task_model.guid] = task_model json_data_dict = TaskService._update_task_data_on_task_model( - task_model, task_data_dict + task_model, task_data_dict, "json_data_hash" ) - new_task_models[task_model.guid] = task_model if json_data_dict is not None: new_json_data_dicts[json_data_dict["hash"]] = json_data_dict + python_env_data_dict = cls._get_python_env_data_dict_from_spiff_task(spiff_task) + python_env_dict = TaskService._update_task_data_on_task_model( + task_model, python_env_data_dict, "python_env_data_hash" + ) + if python_env_dict is not None: + new_json_data_dicts[python_env_dict["hash"]] = python_env_dict + return (bpmn_process, new_task_models, new_json_data_dicts) @classmethod def _update_task_data_on_task_model( - cls, task_model: TaskModel, task_data_dict: dict + cls, task_model: TaskModel, task_data_dict: dict, task_model_data_column: str ) -> Optional[JsonDataDict]: task_data_json = json.dumps(task_data_dict, sort_keys=True) task_data_hash: str = sha256(task_data_json.encode("utf8")).hexdigest() json_data_dict: Optional[JsonDataDict] = None - if task_model.json_data_hash != task_data_hash: + if getattr(task_model, task_model_data_column) != task_data_hash: json_data_dict = {"hash": task_data_hash, "data": task_data_dict} - task_model.json_data_hash = task_data_hash + setattr(task_model, task_model_data_column, task_data_hash) return json_data_dict @classmethod @@ -293,3 +306,7 @@ class TaskService: task_definition_id=task_definition.id, ) return task_model + + @classmethod + def _get_python_env_data_dict_from_spiff_task(cls, spiff_task: SpiffTask) -> dict: + return spiff_task.workflow.script_engine.environment.user_defined_state() diff --git 
a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py index be13342a..d9bf5bf8 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py @@ -78,6 +78,12 @@ class TaskModelSavingDelegate(EngineStepDelegate): """ return self.process_instance.bpmn_process_id is not None + def _update_json_data_dicts_using_list(self, json_data_dict_list: list[Optional[JsonDataDict]]) -> None: + for json_data_dict in json_data_dict_list: + if json_data_dict is not None: + self.json_data_dicts[json_data_dict["hash"]] = json_data_dict + + def will_complete_task(self, spiff_task: SpiffTask) -> None: if self.should_update_task_model(): _bpmn_process, task_model, new_task_models, new_json_data_dicts = ( @@ -98,11 +104,10 @@ class TaskModelSavingDelegate(EngineStepDelegate): def did_complete_task(self, spiff_task: SpiffTask) -> None: if self.current_task_model and self.should_update_task_model(): self.current_task_model.end_in_seconds = time.time() - json_data_dict = TaskService.update_task_model( + json_data_dict_list = TaskService.update_task_model( self.current_task_model, spiff_task, self.serializer ) - if json_data_dict is not None: - self.json_data_dicts[json_data_dict["hash"]] = json_data_dict + self._update_json_data_dicts_using_list(json_data_dict_list) self.task_models[self.current_task_model.guid] = self.current_task_model if self.secondary_engine_step_delegate: self.secondary_engine_step_delegate.did_complete_task(spiff_task) @@ -126,7 +131,7 @@ class TaskModelSavingDelegate(EngineStepDelegate): | TaskState.MAYBE | TaskState.LIKELY ): - _bpmn_process, task_model, new_task_models, new_json_data_dicts = ( + bpmn_process, task_model, new_task_models, new_json_data_dicts = ( TaskService.find_or_create_task_model_from_spiff_task( waiting_spiff_task, self.process_instance, @@ -136,12 +141,11 @@ class TaskModelSavingDelegate(EngineStepDelegate): ) self.task_models.update(new_task_models) self.json_data_dicts.update(new_json_data_dicts) - json_data_dict = TaskService.update_task_model( + json_data_dict_list = TaskService.update_task_model( task_model, waiting_spiff_task, self.serializer ) self.task_models[task_model.guid] = task_model - if json_data_dict is not None: - self.json_data_dicts[json_data_dict["hash"]] = json_data_dict + self._update_json_data_dicts_using_list(json_data_dict_list) class StepDetailLoggingDelegate(EngineStepDelegate): diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling_service.py index 9d481788..44060449 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling_service.py @@ -99,6 +99,7 @@ class TestErrorHandlingService(BaseTest): # Both send and receive messages should be generated, matched # and considered complete. 
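
(A note on the json_data pattern used throughout this series: task data and
python_env data are stored content-addressed, keyed by a sha256 hash of the
serialized dict. A minimal sketch of that hashing step, assuming only the
Python standard library; the helper name is illustrative and not part of the
diffs themselves:

    import json
    from hashlib import sha256

    def hash_task_data(task_data: dict) -> str:
        # Serialize with sorted keys so that equal dicts always produce the
        # same string, then hash the UTF-8 bytes. This mirrors
        # _update_task_data_on_task_model in task_service.py above.
        task_data_json = json.dumps(task_data, sort_keys=True)
        return sha256(task_data_json.encode("utf8")).hexdigest()

Because the hash is derived from the content, two tasks with identical data can
share a single JsonDataModel row, and update_task_model only reports a
JsonDataDict when the stored hash actually changes.)
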
messages = db.session.query(MessageInstanceModel).all() + # import pdb; pdb.set_trace() assert 2 == len(messages) assert "completed" == messages[0].status assert "completed" == messages[1].status diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index ac1a286e..b8cbb268 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -358,53 +358,56 @@ class TestProcessInstanceProcessor(BaseTest): processor_final = ProcessInstanceProcessor(process_instance_relookup) assert process_instance_relookup.status == "complete" - # first_data_set = {"set_in_top_level_script": 1} - # second_data_set = {**first_data_set, **{"set_in_top_level_subprocess": 1}} - # third_data_set = { - # **second_data_set, - # **{"set_in_test_process_to_call_script": 1}, - # } - # expected_task_data = { - # "top_level_script": first_data_set, - # "manual_task": first_data_set, - # "top_level_subprocess_script": second_data_set, - # "top_level_subprocess": second_data_set, - # "test_process_to_call_script": third_data_set, - # "top_level_call_activity": third_data_set, - # "end_event_of_manual_task_model": third_data_set, - # } + first_data_set = {"set_in_top_level_script": 1} + second_data_set = {**first_data_set, **{"set_in_top_level_subprocess": 1, "we_move_on": False}} + third_data_set = { + **second_data_set, + **{"set_in_test_process_to_call_script": 1}, + } + fourth_data_set = { + **third_data_set, + **{'a': 1, 'we_move_on': True} + } + expected_task_data = { + "top_level_script": first_data_set, + "manual_task": first_data_set, + "top_level_subprocess_script": second_data_set, + "top_level_subprocess": second_data_set, + "test_process_to_call_script": third_data_set, + "top_level_call_activity": third_data_set, + "end_event_of_manual_task_model": third_data_set, + "top_level_subprocess_script_second": fourth_data_set, + "test_process_to_call_script_second": fourth_data_set, + } + + spiff_tasks_checked_once: list = [] + + def assert_spiff_task_is_in_process(spiff_task_name: str, bpmn_process_identifier: str) -> None: + if spiff_task.task_spec.name == spiff_task_name: + expected_python_env_data = expected_task_data[spiff_task.task_spec.name] + if spiff_task.task_spec.name in spiff_tasks_checked_once: + expected_python_env_data = expected_task_data[f"{spiff_task.task_spec.name}_second"] + task = TaskModel.query.filter_by(guid=str(spiff_task.id)).first() + assert task.task_definition_id is not None + task_definition = task.task_definition + assert task_definition.bpmn_identifier == spiff_task_name + assert ( + task_definition.bpmn_process_definition.bpmn_identifier + == bpmn_process_identifier + ) + print(f"spiff_task_name: {spiff_task_name}") + print(f"task.json_data(): {task.json_data()}") + print(f"task.python_env_data(): {task.python_env_data()}") + assert task.python_env_data() == expected_python_env_data + spiff_tasks_checked_once.append(spiff_task.task_spec.name) all_spiff_tasks = processor_final.bpmn_process_instance.get_tasks() assert len(all_spiff_tasks) > 1 for spiff_task in all_spiff_tasks: assert spiff_task.state == TaskState.COMPLETED - if spiff_task.task_spec.name == "test_process_to_call_script": - task = TaskModel.query.filter_by(guid=str(spiff_task.id)).first() - assert task.task_definition_id is not None - task_definition 
= task.task_definition
-                assert task_definition.bpmn_identifier == "test_process_to_call_script"
-                assert (
-                    task_definition.bpmn_process_definition.bpmn_identifier
-                    == "test_process_to_call"
-                )
-            elif spiff_task.task_spec.name == "top_level_subprocess_script":
-                task = TaskModel.query.filter_by(guid=str(spiff_task.id)).first()
-                assert task.task_definition_id is not None
-                task_definition = task.task_definition
-                assert task_definition.bpmn_identifier == "top_level_subprocess_script"
-                assert (
-                    task_definition.bpmn_process_definition.bpmn_identifier
-                    == "top_level_subprocess"
-                )
-            if spiff_task.task_spec.name == "top_level_script":
-                task = TaskModel.query.filter_by(guid=str(spiff_task.id)).first()
-                assert task.task_definition_id is not None
-                task_definition = task.task_definition
-                assert task_definition.bpmn_identifier == "top_level_script"
-                assert (
-                    task_definition.bpmn_process_definition.bpmn_identifier
-                    == "top_level_process"
-                )
+            assert_spiff_task_is_in_process("test_process_to_call_script", "test_process_to_call")
+            assert_spiff_task_is_in_process("top_level_subprocess_script", "top_level_subprocess")
+            assert_spiff_task_is_in_process("top_level_script", "top_level_process")

        # FIXME: Checking task data cannot work with the feature/remove-loop-reset branch
        # of SpiffWorkflow. This is because it saves script data to the python_env and NOT
        # to task.data. We may need to either create a new column on TaskModel to put the python_env

From b04976e4b15f9d712cfdf003665120c1727ee389 Mon Sep 17 00:00:00 2001
From: jasquat
Date: Wed, 15 Mar 2023 16:10:23 -0400
Subject: [PATCH 021/162] fixed up tests and ran pyl w/ burnettk

---
 spiffworkflow-backend/migrations/env.py       |  2 -
 .../spiffworkflow_backend/models/json_data.py | 10 +++--
 .../services/process_instance_processor.py    |  5 ++-
 .../services/task_service.py                  | 26 ++++++++---
 .../services/workflow_execution_service.py    |  5 ++-
 .../unit/test_process_instance_processor.py   | 45 ++++++++-----------
 6 files changed, 50 insertions(+), 43 deletions(-)

diff --git a/spiffworkflow-backend/migrations/env.py b/spiffworkflow-backend/migrations/env.py
index 68feded2..630e381a 100644
--- a/spiffworkflow-backend/migrations/env.py
+++ b/spiffworkflow-backend/migrations/env.py
@@ -1,5 +1,3 @@
-from __future__ import with_statement
-
 import logging
 from logging.config import fileConfig
 
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/json_data.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/json_data.py
index 0713f527..95993e2e 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/models/json_data.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/json_data.py
@@ -1,5 +1,4 @@
 from __future__ import annotations
-from typing import Optional
 
 from spiffworkflow_backend.models.db import db
 from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
@@ -35,12 +34,15 @@ class JsonDataModel(SpiffworkflowBaseDBModel):
 
     @classmethod
    def find_object_by_hash(cls, hash: str) -> JsonDataModel:
-        json_data_model: Optional[JsonDataModel] = JsonDataModel.query.filter_by(hash=hash).first()
+        json_data_model: JsonDataModel | None = JsonDataModel.query.filter_by(
+            hash=hash
+        ).first()
         if json_data_model is None:
-            raise JsonDataModelNotFoundError(f"Could not find a json data model entry with hash: {hash}")
+            raise JsonDataModelNotFoundError(
+                f"Could not find a json data model entry with hash: {hash}"
+            )
         return json_data_model
 
-
     @classmethod
     def find_data_dict_by_hash(cls, hash: str) -> dict:
         return
cls.find_object_by_hash(hash).data diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 806d8716..89cea4ae 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -1184,6 +1184,7 @@ class ProcessInstanceProcessor: process_instance=self.process_instance_model, bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, spiff_workflow=self.bpmn_process_instance, + serializer=self._serializer, ) ) for subprocess_task_id, subprocess_properties in subprocesses.items(): @@ -1198,6 +1199,7 @@ class ProcessInstanceProcessor: bpmn_process_guid=subprocess_task_id, bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, spiff_workflow=self.bpmn_process_instance, + serializer=self._serializer, ) new_task_models.update(subprocess_new_task_models) new_json_data_dicts.update(subprocess_new_json_data_models) @@ -1812,8 +1814,7 @@ class ProcessInstanceProcessor: """Serialize.""" self.check_task_data_size() self.preserve_script_engine_state() - # return self._serializer.workflow_to_dict(self.bpmn_process_instance) # type: ignore - return json.loads(self._serializer.serialize_json(self.bpmn_process_instance)) # type: ignore + return self._serializer.workflow_to_dict(self.bpmn_process_instance) # type: ignore def next_user_tasks(self) -> list[SpiffTask]: """Next_user_tasks.""" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index 5b2c7935..e6ae791e 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -38,7 +38,7 @@ class TaskService: if current_app.config["SPIFFWORKFLOW_BACKEND_DATABASE_TYPE"] == "mysql": insert_stmt = mysql_insert(JsonDataModel).values(list_of_dicts) on_duplicate_key_stmt = insert_stmt.on_duplicate_key_update( - data=insert_stmt.inserted.data, status="U" + data=insert_stmt.inserted.data ) else: insert_stmt = postgres_insert(JsonDataModel).values(list_of_dicts) @@ -61,7 +61,9 @@ class TaskService: """ new_properties_json = serializer.task_to_dict(spiff_task) spiff_task_data = new_properties_json.pop("data") - python_env_data_dict = cls._get_python_env_data_dict_from_spiff_task(spiff_task) + python_env_data_dict = cls._get_python_env_data_dict_from_spiff_task( + spiff_task, serializer + ) task_model.properties_json = new_properties_json task_model.state = TaskStateNames[new_properties_json["state"]] json_data_dict = cls._update_task_data_on_task_model( @@ -153,6 +155,7 @@ class TaskService: process_instance=process_instance, bpmn_definition_to_task_definitions_mappings=bpmn_definition_to_task_definitions_mappings, spiff_workflow=spiff_workflow, + serializer=serializer, ) ) else: @@ -169,6 +172,7 @@ class TaskService: bpmn_process_guid=subprocess_guid, bpmn_definition_to_task_definitions_mappings=bpmn_definition_to_task_definitions_mappings, spiff_workflow=spiff_workflow, + serializer=serializer, ) ) return (bpmn_process, new_task_models, new_json_data_dicts) @@ -180,6 +184,7 @@ class TaskService: process_instance: ProcessInstanceModel, bpmn_definition_to_task_definitions_mappings: dict, spiff_workflow: BpmnWorkflow, + 
serializer: BpmnWorkflowSerializer, bpmn_process_parent: Optional[BpmnProcessModel] = None, bpmn_process_guid: Optional[str] = None, ) -> Tuple[BpmnProcessModel, dict[str, TaskModel], dict[str, JsonDataDict]]: @@ -267,7 +272,9 @@ class TaskService: if json_data_dict is not None: new_json_data_dicts[json_data_dict["hash"]] = json_data_dict - python_env_data_dict = cls._get_python_env_data_dict_from_spiff_task(spiff_task) + python_env_data_dict = cls._get_python_env_data_dict_from_spiff_task( + spiff_task, serializer + ) python_env_dict = TaskService._update_task_data_on_task_model( task_model, python_env_data_dict, "python_env_data_hash" ) @@ -278,7 +285,7 @@ class TaskService: @classmethod def _update_task_data_on_task_model( - cls, task_model: TaskModel, task_data_dict: dict, task_model_data_column: str + cls, task_model: TaskModel, task_data_dict: dict, task_model_data_column: str ) -> Optional[JsonDataDict]: task_data_json = json.dumps(task_data_dict, sort_keys=True) task_data_hash: str = sha256(task_data_json.encode("utf8")).hexdigest() @@ -308,5 +315,12 @@ class TaskService: return task_model @classmethod - def _get_python_env_data_dict_from_spiff_task(cls, spiff_task: SpiffTask) -> dict: - return spiff_task.workflow.script_engine.environment.user_defined_state() + def _get_python_env_data_dict_from_spiff_task( + cls, spiff_task: SpiffTask, serializer: BpmnWorkflowSerializer + ) -> dict: + user_defined_state = ( + spiff_task.workflow.script_engine.environment.user_defined_state() + ) + # this helps to convert items like datetime objects to be json serializable + converted_data: dict = serializer.data_converter.convert(user_defined_state) + return converted_data diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py index d9bf5bf8..63a54bae 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py @@ -78,12 +78,13 @@ class TaskModelSavingDelegate(EngineStepDelegate): """ return self.process_instance.bpmn_process_id is not None - def _update_json_data_dicts_using_list(self, json_data_dict_list: list[Optional[JsonDataDict]]) -> None: + def _update_json_data_dicts_using_list( + self, json_data_dict_list: list[Optional[JsonDataDict]] + ) -> None: for json_data_dict in json_data_dict_list: if json_data_dict is not None: self.json_data_dicts[json_data_dict["hash"]] = json_data_dict - def will_complete_task(self, spiff_task: SpiffTask) -> None: if self.should_update_task_model(): _bpmn_process, task_model, new_task_models, new_json_data_dicts = ( diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index b8cbb268..827a3b3d 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -359,15 +359,15 @@ class TestProcessInstanceProcessor(BaseTest): assert process_instance_relookup.status == "complete" first_data_set = {"set_in_top_level_script": 1} - second_data_set = {**first_data_set, **{"set_in_top_level_subprocess": 1, "we_move_on": False}} + second_data_set = { + **first_data_set, + **{"set_in_top_level_subprocess": 1, "we_move_on": False}, + } 
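
(The data-set dicts above are what the assertions just below compare against
each task's stored python_env data. A minimal usage sketch of the TaskModel
helpers added earlier in this series, assuming a Flask app context and an
existing task row; the guid value is illustrative only:

    from spiffworkflow_backend.models.task import TaskModel

    # Each helper resolves a JsonDataModel row via the hash stored on the task.
    task = TaskModel.query.filter_by(guid="some-task-guid").first()
    python_env = task.python_env_data()  # dict the script engine ran with
    task_data = task.json_data()  # dict serialized from the spiff task's data

Both helpers raise JsonDataModelNotFoundError if no row matches the stored
hash.)
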
third_data_set = {
             **second_data_set,
             **{"set_in_test_process_to_call_script": 1},
         }
-        fourth_data_set = {
-            **third_data_set,
-            **{'a': 1, 'we_move_on': True}
-        }
+        fourth_data_set = {**third_data_set, **{"a": 1, "we_move_on": True}}
         expected_task_data = {
             "top_level_script": first_data_set,
             "manual_task": first_data_set,
@@ -382,11 +382,16 @@
 
         spiff_tasks_checked_once: list = []
 
+        # TODO: also check task data here from the spiff_task directly to ensure we hydrated spiff correctly
-        def assert_spiff_task_is_in_process(spiff_task_name: str, bpmn_process_identifier: str) -> None:
+        def assert_spiff_task_is_in_process(
+            spiff_task_name: str, bpmn_process_identifier: str
+        ) -> None:
             if spiff_task.task_spec.name == spiff_task_name:
                 expected_python_env_data = expected_task_data[spiff_task.task_spec.name]
                 if spiff_task.task_spec.name in spiff_tasks_checked_once:
-                    expected_python_env_data = expected_task_data[f"{spiff_task.task_spec.name}_second"]
+                    expected_python_env_data = expected_task_data[
+                        f"{spiff_task.task_spec.name}_second"
+                    ]
                 task = TaskModel.query.filter_by(guid=str(spiff_task.id)).first()
                 assert task.task_definition_id is not None
                 task_definition = task.task_definition
@@ -395,9 +400,6 @@
                     task_definition.bpmn_process_definition.bpmn_identifier
                     == bpmn_process_identifier
                 )
-                print(f"spiff_task_name: {spiff_task_name}")
-                print(f"task.json_data(): {task.json_data()}")
-                print(f"task.python_env_data(): {task.python_env_data()}")
                 assert task.python_env_data() == expected_python_env_data
                 spiff_tasks_checked_once.append(spiff_task.task_spec.name)
 
@@ -405,24 +407,13 @@
         all_spiff_tasks = processor_final.bpmn_process_instance.get_tasks()
         assert len(all_spiff_tasks) > 1
         for spiff_task in all_spiff_tasks:
             assert spiff_task.state == TaskState.COMPLETED
-            assert_spiff_task_is_in_process("test_process_to_call_script", "test_process_to_call")
-            assert_spiff_task_is_in_process("top_level_subprocess_script", "top_level_subprocess")
+            assert_spiff_task_is_in_process(
+                "test_process_to_call_script", "test_process_to_call"
+            )
+            assert_spiff_task_is_in_process(
+                "top_level_subprocess_script", "top_level_subprocess"
+            )
             assert_spiff_task_is_in_process("top_level_script", "top_level_process")
-        # FIXME: Checking task data cannot work with the feature/remove-loop-reset branch
-        # of SpiffWorkflow. This is because it saves script data to the python_env and NOT
-        # to task.data. We may need to either create a new column on TaskModel to put the python_env
-        # data or we could just shove it back onto the task data when adding to the database.
-        # Right now everything works in practice because the python_env data is on the top level workflow
-        # and so is always there but is also always the most recent. If we want to replace spiff_step_details
-        # with TaskModel then we'll need some way to store python_env on each task.
-        # spiff_task_name = spiff_task.task_spec.name
-        # if spiff_task_name in expected_task_data:
-        #     spiff_task_data = expected_task_data[spiff_task_name]
-        #     failure_message = (
-        #         f"Found unexpected task data on {spiff_task_name}. 
" - # f"Expected: {spiff_task_data}, Found: {spiff_task.data}" - # ) - # assert spiff_task.data == spiff_task_data, failure_message def test_does_not_recreate_human_tasks_on_multiple_saves( self, From e0959716e720d8625e18c975a902d3c688e32460 Mon Sep 17 00:00:00 2001 From: jasquat Date: Wed, 15 Mar 2023 16:15:15 -0400 Subject: [PATCH 022/162] use main branch of spiffworkflow w/ burnettk --- spiffworkflow-backend/poetry.lock | 6 +++--- spiffworkflow-backend/pyproject.toml | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/spiffworkflow-backend/poetry.lock b/spiffworkflow-backend/poetry.lock index 33503bb7..a8d70db3 100644 --- a/spiffworkflow-backend/poetry.lock +++ b/spiffworkflow-backend/poetry.lock @@ -1894,8 +1894,8 @@ lxml = "*" [package.source] type = "git" url = "https://github.com/sartography/SpiffWorkflow" -reference = "feature/remove-loop-reset" -resolved_reference = "13034aaf12f62aa3914744ca05bc9a3e3b3c3452" +reference = "main" +resolved_reference = "f162aac43af3af18d1a55186aeccea154fb8b05d" [[package]] name = "SQLAlchemy" @@ -2274,7 +2274,7 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools" [metadata] lock-version = "1.1" python-versions = ">=3.9,<3.12" -content-hash = "7ab6d5021406b573edfdca4f9e0f5e62c41a6f6ea09d34154df72454887e3670" +content-hash = "b9ea32912509637f1378d060771de7548d93953aa3db12d6a48098f7dc15205f" [metadata.files] alabaster = [ diff --git a/spiffworkflow-backend/pyproject.toml b/spiffworkflow-backend/pyproject.toml index 3b3f09aa..87f3a5d5 100644 --- a/spiffworkflow-backend/pyproject.toml +++ b/spiffworkflow-backend/pyproject.toml @@ -27,7 +27,7 @@ flask-marshmallow = "*" flask-migrate = "*" flask-restful = "*" werkzeug = "*" -SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "feature/remove-loop-reset"} +SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"} # SpiffWorkflow = {develop = true, path = "../SpiffWorkflow" } sentry-sdk = "^1.10" sphinx-autoapi = "^2.0" From 4b64f725f4c1b45689b4f8aef77be18db1bfb6bf Mon Sep 17 00:00:00 2001 From: jasquat Date: Wed, 15 Mar 2023 16:24:08 -0400 Subject: [PATCH 023/162] increase line length from 88 to 119 for black w/ burnettk --- .pre-commit-config.yaml | 3 +- .../bin/get_bpmn_json_for_process_instance | 14 +- .../bin/import_tickets_for_command_line.py | 11 +- .../bin/import_tickets_for_script_task.py | 4 +- spiffworkflow-backend/bin/save_all_bpmn.py | 3 +- spiffworkflow-backend/noxfile.py | 8 +- .../src/spiffworkflow_backend/__init__.py | 48 +- .../spiffworkflow_backend/config/__init__.py | 46 +- .../spiffworkflow_backend/config/default.py | 70 +-- .../src/spiffworkflow_backend/config/demo.py | 3 +- .../config/local_development.py | 11 +- .../src/spiffworkflow_backend/config/qa2.py | 8 +- .../config/sartography.py | 7 +- .../spiffworkflow_backend/config/staging.py | 4 +- .../config/terraform_deployed_environment.py | 17 +- .../config/unit_testing.py | 8 +- .../exceptions/api_error.py | 15 +- .../models/bpmn_process.py | 4 +- .../src/spiffworkflow_backend/models/db.py | 8 +- .../src/spiffworkflow_backend/models/file.py | 4 +- .../src/spiffworkflow_backend/models/group.py | 4 +- .../models/human_task.py | 8 +- .../models/human_task_user.py | 4 +- .../spiffworkflow_backend/models/json_data.py | 8 +- .../models/message_instance.py | 32 +- .../models/message_instance_correlation.py | 8 +- .../models/permission_assignment.py | 4 +- .../models/permission_target.py | 4 +- .../models/process_group.py | 12 +- 
.../models/process_instance.py | 19 +- .../models/process_instance_file_data.py | 8 +- .../models/process_instance_metadata.py | 10 +- .../models/process_instance_queue.py | 4 +- .../models/process_instance_report.py | 32 +- .../models/process_model.py | 8 +- .../models/spec_reference.py | 4 +- .../models/spiff_step_details.py | 10 +- .../src/spiffworkflow_backend/models/task.py | 28 +- .../src/spiffworkflow_backend/models/user.py | 4 +- .../models/user_group_assignment.py | 4 +- .../models/user_group_assignment_waiting.py | 6 +- .../routes/messages_controller.py | 13 +- .../openid_blueprint/openid_blueprint.py | 9 +- .../routes/process_api_blueprint.py | 60 +-- .../routes/process_groups_controller.py | 49 +- .../routes/process_instances_controller.py | 136 ++--- .../routes/process_models_controller.py | 181 ++----- .../routes/script_unit_tests_controller.py | 38 +- .../routes/service_tasks_controller.py | 16 +- .../routes/tasks_controller.py | 138 ++---- .../src/spiffworkflow_backend/routes/user.py | 83 +--- .../routes/user_blueprint.py | 14 +- .../routes/users_controller.py | 5 +- .../delete_process_instances_with_criteria.py | 9 +- .../scripts/fact_service.py | 12 +- .../scripts/get_all_permissions.py | 7 +- .../scripts/get_current_user.py | 7 +- .../scripts/get_data_sizes.py | 10 +- .../scripts/get_encoded_file_data.py | 4 +- .../spiffworkflow_backend/scripts/get_env.py | 7 +- .../scripts/get_frontend_url.py | 7 +- .../scripts/get_group_members.py | 3 +- .../scripts/get_localtime.py | 7 +- .../scripts/get_process_info.py | 11 +- .../scripts/get_process_initiator_user.py | 4 +- .../scripts/get_secret.py | 7 +- .../scripts/markdown_file_download_link.py | 14 +- .../scripts/save_process_instance_metadata.py | 4 +- .../spiffworkflow_backend/scripts/script.py | 11 +- .../services/authentication_service.py | 33 +- .../services/authorization_service.py | 207 ++------ .../services/background_processing_service.py | 4 +- .../services/error_handling_service.py | 17 +- .../services/file_system_service.py | 19 +- .../services/git_service.py | 96 +--- .../services/group_service.py | 8 +- .../services/logging_service.py | 16 +- .../services/message_service.py | 43 +- .../services/process_instance_lock_service.py | 8 +- .../services/process_instance_processor.py | 464 +++++------------- .../process_instance_queue_service.py | 7 +- .../process_instance_report_service.py | 118 ++--- .../services/process_instance_service.py | 59 +-- .../services/process_model_service.py | 120 ++--- .../services/script_unit_test_runner.py | 17 +- .../services/secret_service.py | 18 +- .../services/service_task_service.py | 49 +- .../services/spec_file_service.py | 89 +--- .../services/task_service.py | 95 ++-- .../services/user_service.py | 55 +-- .../services/workflow_execution_service.py | 55 +-- .../helpers/base_test.py | 45 +- .../helpers/example_data.py | 8 +- .../integration/test_for_good_errors.py | 12 +- .../integration/test_logging_service.py | 8 +- .../integration/test_nested_groups.py | 34 +- .../integration/test_openid_blueprint.py | 8 +- .../integration/test_process_api.py | 413 ++++------------ .../test_process_instances_controller.py | 12 +- .../integration/test_secret_service.py | 28 +- .../scripts/test_get_all_permissions.py | 4 +- .../scripts/test_get_group_members.py | 4 +- .../test_get_last_user_completing_task.py | 17 +- .../scripts/test_get_localtime.py | 4 +- .../test_get_process_initiator_user.py | 13 +- .../test_save_process_instance_metadata.py | 8 +- .../unit/test_acceptance_test_fixtures.py | 
12 +- .../unit/test_authorization_service.py | 132 ++--- .../unit/test_dot_notation.py | 8 +- .../unit/test_error_handling_service.py | 16 +- .../unit/test_message_instance.py | 75 +-- .../unit/test_message_service.py | 81 +-- .../unit/test_permission_target.py | 12 +- .../unit/test_permissions.py | 28 +- .../unit/test_process_group.py | 4 +- .../unit/test_process_instance_processor.py | 135 ++--- .../test_process_instance_report_service.py | 199 +++----- .../unit/test_process_instance_service.py | 13 +- .../unit/test_process_model.py | 52 +- .../unit/test_process_model_service.py | 8 +- .../unit/test_restricted_script_engine.py | 16 +- .../unit/test_script_unit_test_runner.py | 64 +-- .../unit/test_service_task_delegate.py | 21 +- .../unit/test_spec_file_service.py | 59 +-- .../unit/test_spiff_logging.py | 8 +- .../unit/test_user_service.py | 8 +- .../unit/test_various_bpmn_constructs.py | 8 +- 127 files changed, 1157 insertions(+), 3400 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 9353025e..9a871089 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -18,8 +18,7 @@ repos: # --line-length because then we can avoid the fancy line wrapping in more instances and jason, kb, and elizabeth # kind of prefer long lines rather than cutely-formatted sets of lines. # TODO: enable when its safe to update the files - # args: [--preview, --line-length, "110"] - args: [--preview] + args: [--preview, --line-length, "119"] - id: check-added-large-files files: ^spiffworkflow-backend/ diff --git a/spiffworkflow-backend/bin/get_bpmn_json_for_process_instance b/spiffworkflow-backend/bin/get_bpmn_json_for_process_instance index 37f59a7d..bd02ae3d 100755 --- a/spiffworkflow-backend/bin/get_bpmn_json_for_process_instance +++ b/spiffworkflow-backend/bin/get_bpmn_json_for_process_instance @@ -21,22 +21,14 @@ def main(process_instance_id: str) -> None: os.environ[flask_env_key] = "whatevs" app = create_app() with app.app_context(): - process_instance = ProcessInstanceModel.query.filter_by( - id=process_instance_id - ).first() + process_instance = ProcessInstanceModel.query.filter_by(id=process_instance_id).first() file_path = f"/var/tmp/{process_instance_id}_bpmn_json.json" if not process_instance: - raise Exception( - f"Could not find a process instance with id: {process_instance_id}" - ) + raise Exception(f"Could not find a process instance with id: {process_instance_id}") with open(file_path, "w", encoding="utf-8") as f: - f.write( - json.dumps( - ProcessInstanceProcessor._get_full_bpmn_json(process_instance) - ) - ) + f.write(json.dumps(ProcessInstanceProcessor._get_full_bpmn_json(process_instance))) print(f"Saved to {file_path}") diff --git a/spiffworkflow-backend/bin/import_tickets_for_command_line.py b/spiffworkflow-backend/bin/import_tickets_for_command_line.py index c89cc2a7..db7e35be 100644 --- a/spiffworkflow-backend/bin/import_tickets_for_command_line.py +++ b/spiffworkflow-backend/bin/import_tickets_for_command_line.py @@ -28,8 +28,7 @@ def main(): with app.app_context(): process_model_identifier_ticket = "ticket" db.session.query(ProcessInstanceModel).filter( - ProcessInstanceModel.process_model_identifier - == process_model_identifier_ticket + ProcessInstanceModel.process_model_identifier == process_model_identifier_ticket ).delete() db.session.commit() @@ -60,9 +59,7 @@ def main(): header = next(reader) for column_name in columns_to_data_key_mappings: - columns_to_header_index_mappings[column_name] = header.index( - column_name - ) + 
columns_to_header_index_mappings[column_name] = header.index(column_name) id_index = header.index("ID") priority_index = header.index("Priority") print(f"header: {header}") @@ -87,9 +84,7 @@ def main(): desired_data_key, ) in columns_to_data_key_mappings.items(): appropriate_index = columns_to_header_index_mappings[column_name] - processor.bpmn_process_instance.data[desired_data_key] = row[ - appropriate_index - ] + processor.bpmn_process_instance.data[desired_data_key] = row[appropriate_index] print(f"datas: {processor.bpmn_process_instance.data}") if processor.bpmn_process_instance.data["month"] == "": diff --git a/spiffworkflow-backend/bin/import_tickets_for_script_task.py b/spiffworkflow-backend/bin/import_tickets_for_script_task.py index 1e9f6d19..9550699c 100644 --- a/spiffworkflow-backend/bin/import_tickets_for_script_task.py +++ b/spiffworkflow-backend/bin/import_tickets_for_script_task.py @@ -84,9 +84,7 @@ def main(): ) in columns_to_data_key_mappings.items(): appropriate_index = columns_to_header_index_mappings[column_name] print(f"appropriate_index: {appropriate_index}") - processor.bpmn_process_instance.data[desired_data_key] = row[ - appropriate_index - ] + processor.bpmn_process_instance.data[desired_data_key] = row[appropriate_index] # you at least need a month, or else this row in the csv is considered garbage month_value = processor.bpmn_process_instance.data["month"] diff --git a/spiffworkflow-backend/bin/save_all_bpmn.py b/spiffworkflow-backend/bin/save_all_bpmn.py index fd44bb54..95a181e8 100644 --- a/spiffworkflow-backend/bin/save_all_bpmn.py +++ b/spiffworkflow-backend/bin/save_all_bpmn.py @@ -13,8 +13,7 @@ def main() -> None: for bpmn_errors in failing_process_models: print(bpmn_errors) if ( - os.environ.get("SPIFFWORKFLOW_BACKEND_FAIL_ON_INVALID_PROCESS_MODELS") - != "false" + os.environ.get("SPIFFWORKFLOW_BACKEND_FAIL_ON_INVALID_PROCESS_MODELS") != "false" and len(failing_process_models) > 0 ): exit(1) diff --git a/spiffworkflow-backend/noxfile.py b/spiffworkflow-backend/noxfile.py index 632f33d4..f266e411 100644 --- a/spiffworkflow-backend/noxfile.py +++ b/spiffworkflow-backend/noxfile.py @@ -36,9 +36,7 @@ nox.options.sessions = ( def setup_database(session: Session) -> None: """Run database migrations against the database.""" - session.env["FLASK_INSTANCE_PATH"] = os.path.join( - os.getcwd(), "instance", "testing" - ) + session.env["FLASK_INSTANCE_PATH"] = os.path.join(os.getcwd(), "instance", "testing") flask_env_key = "FLASK_SESSION_SECRET_KEY" session.env[flask_env_key] = "e7711a3ba96c46c68e084a86952de16f" session.env["FLASK_APP"] = "src/spiffworkflow_backend" @@ -72,9 +70,7 @@ def activate_virtualenv_in_precommit_hooks(session: Session) -> None: text = hook.read_text() bindir = repr(session.bin)[1:-1] # strip quotes - if not ( - Path("A") == Path("a") and bindir.lower() in text.lower() or bindir in text - ): + if not (Path("A") == Path("a") and bindir.lower() in text.lower() or bindir in text): continue lines = text.splitlines() diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py b/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py index d7041ecb..3e2191c8 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py @@ -63,16 +63,12 @@ class MyJSONEncoder(DefaultJSONProvider): return super().dumps(obj, **kwargs) -def start_scheduler( - app: flask.app.Flask, scheduler_class: BaseScheduler = BackgroundScheduler -) -> None: +def start_scheduler(app: 
flask.app.Flask, scheduler_class: BaseScheduler = BackgroundScheduler) -> None:
     """Start_scheduler."""
     scheduler = scheduler_class()
 
     # TODO: polling intervals for different jobs
-    polling_interval_in_seconds = app.config[
-        "SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_POLLING_INTERVAL_IN_SECONDS"
-    ]
+    polling_interval_in_seconds = app.config["SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_POLLING_INTERVAL_IN_SECONDS"]
     # TODO: add job to release locks to simplify other queries
     # TODO: add job to delete completed entries
     # TODO: add job to run old/low priority instances so they do not get drowned out
@@ -100,10 +96,7 @@ def should_start_scheduler(app: flask.app.Flask) -> bool:
         return False
 
     # do not start the scheduler twice in flask debug mode but support code reloading
-    if (
-        app.config["ENV_IDENTIFIER"] != "local_development"
-        or os.environ.get("WERKZEUG_RUN_MAIN") != "true"
-    ):
+    if app.config["ENV_IDENTIFIER"] != "local_development" or os.environ.get("WERKZEUG_RUN_MAIN") != "true":
         return False
     return True
 
@@ -126,9 +119,7 @@ def create_app() -> flask.app.Flask:
     # variable, it will be one thing when we run flask db upgrade in the
     # noxfile and another thing when the tests actually run.
     # instance_path is described more at https://flask.palletsprojects.com/en/2.1.x/config/
-    connexion_app = connexion.FlaskApp(
-        __name__, server_args={"instance_path": os.environ.get("FLASK_INSTANCE_PATH")}
-    )
+    connexion_app = connexion.FlaskApp(__name__, server_args={"instance_path": os.environ.get("FLASK_INSTANCE_PATH")})
     app = connexion_app.app
     app.config["CONNEXION_APP"] = connexion_app
     app.config["SESSION_TYPE"] = "filesystem"
@@ -145,8 +136,7 @@ def create_app() -> flask.app.Flask:
     # we will add an Access-Control-Max-Age header to the response to tell the browser it doesn't
     # need to continually keep asking for the same path.
     origins_re = [
origins_re = [ - r"^https?:\/\/%s(.*)" % o.replace(".", r"\.") - for o in app.config["SPIFFWORKFLOW_BACKEND_CORS_ALLOW_ORIGINS"] + r"^https?:\/\/%s(.*)" % o.replace(".", r"\.") for o in app.config["SPIFFWORKFLOW_BACKEND_CORS_ALLOW_ORIGINS"] ] CORS(app, origins=origins_re, max_age=3600, supports_credentials=True) @@ -195,13 +185,9 @@ def get_hacked_up_app_for_script() -> flask.app.Flask: os.environ[flask_env_key] = "whatevs" if "SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR" not in os.environ: home = os.environ["HOME"] - full_process_model_path = ( - f"{home}/projects/github/sartography/sample-process-models" - ) + full_process_model_path = f"{home}/projects/github/sartography/sample-process-models" if os.path.isdir(full_process_model_path): - os.environ["SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"] = ( - full_process_model_path - ) + os.environ["SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"] = full_process_model_path else: raise Exception(f"Could not find {full_process_model_path}") app = create_app() @@ -245,21 +231,13 @@ def configure_sentry(app: flask.app.Flask) -> None: return None return event - sentry_errors_sample_rate = app.config.get( - "SPIFFWORKFLOW_BACKEND_SENTRY_ERRORS_SAMPLE_RATE" - ) + sentry_errors_sample_rate = app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_ERRORS_SAMPLE_RATE") if sentry_errors_sample_rate is None: - raise Exception( - "SPIFFWORKFLOW_BACKEND_SENTRY_ERRORS_SAMPLE_RATE is not set somehow" - ) + raise Exception("SPIFFWORKFLOW_BACKEND_SENTRY_ERRORS_SAMPLE_RATE is not set somehow") - sentry_traces_sample_rate = app.config.get( - "SPIFFWORKFLOW_BACKEND_SENTRY_TRACES_SAMPLE_RATE" - ) + sentry_traces_sample_rate = app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_TRACES_SAMPLE_RATE") if sentry_traces_sample_rate is None: - raise Exception( - "SPIFFWORKFLOW_BACKEND_SENTRY_TRACES_SAMPLE_RATE is not set somehow" - ) + raise Exception("SPIFFWORKFLOW_BACKEND_SENTRY_TRACES_SAMPLE_RATE is not set somehow") sentry_configs = { "dsn": app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_DSN"), @@ -284,8 +262,6 @@ def configure_sentry(app: flask.app.Flask) -> None: # but also we commented out profiling because it was causing segfaults (i guess it is marked experimental) profiles_sample_rate = 0 if sys.platform.startswith("win") else 1 if profiles_sample_rate > 0: - sentry_configs["_experiments"] = { - "profiles_sample_rate": profiles_sample_rate - } + sentry_configs["_experiments"] = {"profiles_sample_rate": profiles_sample_rate} sentry_sdk.init(**sentry_configs) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/__init__.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/__init__.py index a9d99b95..7711c36f 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/__init__.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/__init__.py @@ -30,13 +30,9 @@ def setup_database_uri(app: Flask) -> None: db_pswd = app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD") if db_pswd is None: db_pswd = "" - app.config["SQLALCHEMY_DATABASE_URI"] = ( - f"mysql+mysqlconnector://root:{db_pswd}@localhost/{database_name}" - ) + app.config["SQLALCHEMY_DATABASE_URI"] = f"mysql+mysqlconnector://root:{db_pswd}@localhost/{database_name}" else: - app.config["SQLALCHEMY_DATABASE_URI"] = app.config.get( - "SPIFFWORKFLOW_BACKEND_DATABASE_URI" - ) + app.config["SQLALCHEMY_DATABASE_URI"] = app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_URI") def load_config_file(app: Flask, env_config_module: str) -> None: @@ -45,30 +41,20 @@ def load_config_file(app: Flask, 
env_config_module: str) -> None: app.config.from_object(env_config_module) print(f"loaded config: {env_config_module}") except ImportStringError as exception: - if ( - os.environ.get("SPIFFWORKFLOW_BACKEND_TERRAFORM_DEPLOYED_ENVIRONMENT") - != "true" - ): - raise ModuleNotFoundError( - f"Cannot find config module: {env_config_module}" - ) from exception + if os.environ.get("SPIFFWORKFLOW_BACKEND_TERRAFORM_DEPLOYED_ENVIRONMENT") != "true": + raise ModuleNotFoundError(f"Cannot find config module: {env_config_module}") from exception def _set_up_tenant_specific_fields_as_list_of_strings(app: Flask) -> None: - tenant_specific_fields = app.config.get( - "SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS" - ) + tenant_specific_fields = app.config.get("SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS") if tenant_specific_fields is None or tenant_specific_fields == "": app.config["SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS"] = [] else: - app.config["SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS"] = ( - tenant_specific_fields.split(",") - ) + app.config["SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS"] = tenant_specific_fields.split(",") if len(app.config["SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS"]) > 3: raise ConfigurationError( - "SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS can have a" - " maximum of 3 fields" + "SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS can have a maximum of 3 fields" ) @@ -80,9 +66,7 @@ def setup_config(app: Flask) -> None: except OSError: pass - app.config["ENV_IDENTIFIER"] = os.environ.get( - "SPIFFWORKFLOW_BACKEND_ENV", "local_development" - ) + app.config["ENV_IDENTIFIER"] = os.environ.get("SPIFFWORKFLOW_BACKEND_ENV", "local_development") app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False load_config_file(app, "spiffworkflow_backend.config.default") @@ -99,10 +83,7 @@ def setup_config(app: Flask) -> None: # This allows config/testing.py or instance/config.py to override the default config if "ENV_IDENTIFIER" in app.config and app.config["ENV_IDENTIFIER"] == "testing": app.config.from_pyfile("config/testing.py", silent=True) - elif ( - "ENV_IDENTIFIER" in app.config - and app.config["ENV_IDENTIFIER"] == "unit_testing" - ): + elif "ENV_IDENTIFIER" in app.config and app.config["ENV_IDENTIFIER"] == "unit_testing": app.config.from_pyfile("config/unit_testing.py", silent=True) else: app.config.from_pyfile(f"{app.instance_path}/config.py", silent=True) @@ -125,15 +106,10 @@ def setup_config(app: Flask) -> None: app.config.from_pyfile(os.path.join("config", "secrets.py"), silent=True) if app.config["SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"] is None: - raise ConfigurationError( - "SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR config must be set" - ) + raise ConfigurationError("SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR config must be set") if app.config["FLASK_SESSION_SECRET_KEY"] is None: - raise KeyError( - "Cannot find the secret_key from the environment. Please set" - " FLASK_SESSION_SECRET_KEY" - ) + raise KeyError("Cannot find the secret_key from the environment. 
Please set FLASK_SESSION_SECRET_KEY") app.secret_key = os.environ.get("FLASK_SESSION_SECRET_KEY") diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py index 61a89f97..ca808564 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py @@ -8,9 +8,7 @@ from os import environ FLASK_SESSION_SECRET_KEY = environ.get("FLASK_SESSION_SECRET_KEY") -SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR = environ.get( - "SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR" -) +SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR = environ.get("SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR") cors_allow_all = "*" SPIFFWORKFLOW_BACKEND_CORS_ALLOW_ORIGINS = re.split( r",\s*", @@ -18,8 +16,7 @@ SPIFFWORKFLOW_BACKEND_CORS_ALLOW_ORIGINS = re.split( ) SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER = ( - environ.get("SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER", default="false") - == "true" + environ.get("SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER", default="false") == "true" ) SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_POLLING_INTERVAL_IN_SECONDS = int( environ.get( @@ -30,9 +27,7 @@ SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_POLLING_INTERVAL_IN_SECONDS = int( SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND = environ.get( "SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND", default="http://localhost:7001" ) -SPIFFWORKFLOW_BACKEND_URL = environ.get( - "SPIFFWORKFLOW_BACKEND_URL", default="http://localhost:7000" -) +SPIFFWORKFLOW_BACKEND_URL = environ.get("SPIFFWORKFLOW_BACKEND_URL", default="http://localhost:7000") # service task connector proxy SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL = environ.get( "SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL", default="http://localhost:7004" @@ -68,18 +63,12 @@ SPIFFWORKFLOW_BACKEND_ENCRYPTION_LIB = environ.get( default="no_op_cipher", ) -SPIFFWORKFLOW_BACKEND_LOG_TO_FILE = ( - environ.get("SPIFFWORKFLOW_BACKEND_LOG_TO_FILE", default="false") == "true" -) +SPIFFWORKFLOW_BACKEND_LOG_TO_FILE = environ.get("SPIFFWORKFLOW_BACKEND_LOG_TO_FILE", default="false") == "true" -SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get( - "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME" -) +SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get("SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME") # Sentry Configuration -SPIFFWORKFLOW_BACKEND_SENTRY_DSN = environ.get( - "SPIFFWORKFLOW_BACKEND_SENTRY_DSN", default="" -) +SPIFFWORKFLOW_BACKEND_SENTRY_DSN = environ.get("SPIFFWORKFLOW_BACKEND_SENTRY_DSN", default="") SPIFFWORKFLOW_BACKEND_SENTRY_ERRORS_SAMPLE_RATE = environ.get( "SPIFFWORKFLOW_BACKEND_SENTRY_ERRORS_SAMPLE_RATE", default="1" ) # send all errors @@ -89,43 +78,28 @@ SPIFFWORKFLOW_BACKEND_SENTRY_TRACES_SAMPLE_RATE = environ.get( SPIFFWORKFLOW_BACKEND_SENTRY_ORGANIZATION_SLUG = environ.get( "SPIFFWORKFLOW_BACKEND_SENTRY_ORGANIZATION_SLUG", default=None ) -SPIFFWORKFLOW_BACKEND_SENTRY_PROJECT_SLUG = environ.get( - "SPIFFWORKFLOW_BACKEND_SENTRY_PROJECT_SLUG", default=None -) +SPIFFWORKFLOW_BACKEND_SENTRY_PROJECT_SLUG = environ.get("SPIFFWORKFLOW_BACKEND_SENTRY_PROJECT_SLUG", default=None) SPIFFWORKFLOW_BACKEND_SENTRY_PROFILING_ENABLED = ( - environ.get("SPIFFWORKFLOW_BACKEND_SENTRY_PROFILING_ENABLED", default="false") - == "true" + environ.get("SPIFFWORKFLOW_BACKEND_SENTRY_PROFILING_ENABLED", default="false") == "true" ) -SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get( - "SPIFFWORKFLOW_BACKEND_LOG_LEVEL", default="info" -) 
+SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get("SPIFFWORKFLOW_BACKEND_LOG_LEVEL", default="info") # When a user clicks on the `Publish` button, this is the default branch this server merges into. # I.e., dev server could have `staging` here. Staging server might have `production` here. -SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH = environ.get( - "SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH" -) +SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH = environ.get("SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH") # This is the branch that the app automatically commits to every time the user clicks the save button # or otherwise changes a process model. # If publishing is enabled, the contents of this "staging area" / "scratch pad" / WIP spot will be used # as the relevant contents for the process model that the user wants to publish. -SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH = environ.get( - "SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH" -) -SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL = environ.get( - "SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL" -) +SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH = environ.get("SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH") +SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL = environ.get("SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL") SPIFFWORKFLOW_BACKEND_GIT_COMMIT_ON_SAVE = ( environ.get("SPIFFWORKFLOW_BACKEND_GIT_COMMIT_ON_SAVE", default="false") == "true" ) SPIFFWORKFLOW_BACKEND_GIT_USERNAME = environ.get("SPIFFWORKFLOW_BACKEND_GIT_USERNAME") -SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL = environ.get( - "SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL" -) -SPIFFWORKFLOW_BACKEND_GITHUB_WEBHOOK_SECRET = environ.get( - "SPIFFWORKFLOW_BACKEND_GITHUB_WEBHOOK_SECRET", default=None -) +SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL = environ.get("SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL") +SPIFFWORKFLOW_BACKEND_GITHUB_WEBHOOK_SECRET = environ.get("SPIFFWORKFLOW_BACKEND_GITHUB_WEBHOOK_SECRET", default=None) SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY_PATH = environ.get( "SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY_PATH", default=None ) @@ -135,23 +109,17 @@ SPIFFWORKFLOW_BACKEND_DATABASE_TYPE = environ.get( "SPIFFWORKFLOW_BACKEND_DATABASE_TYPE", default="mysql" ) # can also be sqlite, postgres # Override the above with a specific SQLAlchemy connection string. -SPIFFWORKFLOW_BACKEND_DATABASE_URI = environ.get( - "SPIFFWORKFLOW_BACKEND_DATABASE_URI", default=None -) +SPIFFWORKFLOW_BACKEND_DATABASE_URI = environ.get("SPIFFWORKFLOW_BACKEND_DATABASE_URI", default=None) SPIFFWORKFLOW_BACKEND_SYSTEM_NOTIFICATION_PROCESS_MODEL_MESSAGE_ID = environ.get( "SPIFFWORKFLOW_BACKEND_SYSTEM_NOTIFICATION_PROCESS_MODEL_MESSAGE_ID", default="Message_SystemMessageNotification", ) SPIFFWORKFLOW_BACKEND_ALLOW_CONFISCATING_LOCK_AFTER_SECONDS = int( - environ.get( - "SPIFFWORKFLOW_BACKEND_ALLOW_CONFISCATING_LOCK_AFTER_SECONDS", default="600" - ) + environ.get("SPIFFWORKFLOW_BACKEND_ALLOW_CONFISCATING_LOCK_AFTER_SECONDS", default="600") ) -SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP = environ.get( - "SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP", default="everybody" -) +SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP = environ.get("SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP", default="everybody") SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_BACKGROUND = environ.get( "SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_BACKGROUND", default="greedy" @@ -162,6 +130,4 @@ SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_WEB = environ.get( ) # this is only used in CI.
use SPIFFWORKFLOW_BACKEND_DATABASE_URI instead for real configuration -SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD = environ.get( - "SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD", default=None -) +SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD = environ.get("SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD", default=None) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/demo.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/demo.py index aec6a03b..c9694489 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/demo.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/demo.py @@ -10,6 +10,5 @@ SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get( ) SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER = ( - environ.get("SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER", default="false") - == "true" + environ.get("SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER", default="false") == "true" ) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/local_development.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/local_development.py index 197637b4..0df353d8 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/local_development.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/local_development.py @@ -5,19 +5,14 @@ SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get( "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", default="local_development.yml" ) -SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get( - "SPIFFWORKFLOW_BACKEND_LOG_LEVEL", default="debug" -) +SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get("SPIFFWORKFLOW_BACKEND_LOG_LEVEL", default="debug") SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER = ( - environ.get("SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER", default="false") - == "true" + environ.get("SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER", default="false") == "true" ) SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL = environ.get( "SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL", default="https://github.com/sartography/sample-process-models.git", ) SPIFFWORKFLOW_BACKEND_GIT_USERNAME = "sartography-automated-committer" -SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL = ( - f"{SPIFFWORKFLOW_BACKEND_GIT_USERNAME}@users.noreply.github.com" -) +SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL = f"{SPIFFWORKFLOW_BACKEND_GIT_USERNAME}@users.noreply.github.com" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/qa2.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/qa2.py index f81d8864..b5ac6cee 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/qa2.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/qa2.py @@ -5,10 +5,6 @@ SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get( "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", default="qa1.yml" ) SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND = "https://qa2.spiffworkflow.org" -SPIFFWORKFLOW_BACKEND_OPEN_ID_SERVER_URL = ( - "https://qa2.spiffworkflow.org/keycloak/realms/spiffworkflow" -) +SPIFFWORKFLOW_BACKEND_OPEN_ID_SERVER_URL = "https://qa2.spiffworkflow.org/keycloak/realms/spiffworkflow" SPIFFWORKFLOW_BACKEND_URL = "https://qa2.spiffworkflow.org/api" -SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL = ( - "https://qa2.spiffworkflow.org/connector-proxy" -) +SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL = "https://qa2.spiffworkflow.org/connector-proxy" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/sartography.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/sartography.py index 08368474..8dd2e1a0 100644 --- 
a/spiffworkflow-backend/src/spiffworkflow_backend/config/sartography.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/sartography.py @@ -3,12 +3,9 @@ from os import environ environment_identifier_for_this_config_file_only = environ["SPIFFWORKFLOW_BACKEND_ENV"] SPIFFWORKFLOW_BACKEND_OPEN_ID_SERVER_URL = ( - f"https://keycloak.{environment_identifier_for_this_config_file_only}" - ".spiffworkflow.org/realms/sartography" -) -SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH = environ.get( - "SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH", default="main" + f"https://keycloak.{environment_identifier_for_this_config_file_only}.spiffworkflow.org/realms/sartography" ) +SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH = environ.get("SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH", default="main") SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL = environ.get( "SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL", default="https://github.com/sartography/sartography-process-models.git", diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/staging.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/staging.py index edfe36d7..55df0c16 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/staging.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/staging.py @@ -1,9 +1,7 @@ """Staging.""" from os import environ -SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH = environ.get( - "SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH", default="staging" -) +SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH = environ.get("SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH", default="staging") SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH = environ.get( "SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH", default="main" ) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/terraform_deployed_environment.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/terraform_deployed_environment.py index 20c5524c..1585b577 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/terraform_deployed_environment.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/terraform_deployed_environment.py @@ -6,36 +6,29 @@ environment_identifier_for_this_config_file_only = environ["SPIFFWORKFLOW_BACKEN SPIFFWORKFLOW_BACKEND_GIT_COMMIT_ON_SAVE = True SPIFFWORKFLOW_BACKEND_GIT_USERNAME = "sartography-automated-committer" -SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL = ( - f"{SPIFFWORKFLOW_BACKEND_GIT_USERNAME}@users.noreply.github.com" -) +SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL = f"{SPIFFWORKFLOW_BACKEND_GIT_USERNAME}@users.noreply.github.com" SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get( "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", default="terraform_deployed_environment.yml", ) SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER = ( - environ.get("SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER", default="false") - == "true" + environ.get("SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER", default="false") == "true" ) SPIFFWORKFLOW_BACKEND_OPEN_ID_SERVER_URL = environ.get( "SPIFFWORKFLOW_BACKEND_OPEN_ID_SERVER_URL", default=( - f"https://keycloak.{environment_identifier_for_this_config_file_only}" - ".spiffworkflow.org/realms/spiffworkflow" + f"https://keycloak.{environment_identifier_for_this_config_file_only}.spiffworkflow.org/realms/spiffworkflow" ), ) SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND = ( f"https://{environment_identifier_for_this_config_file_only}.spiffworkflow.org" ) -SPIFFWORKFLOW_BACKEND_URL = ( - f"https://api.{environment_identifier_for_this_config_file_only}.spiffworkflow.org" -) 
+SPIFFWORKFLOW_BACKEND_URL = f"https://api.{environment_identifier_for_this_config_file_only}.spiffworkflow.org" SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL = ( - f"https://connector-proxy.{environment_identifier_for_this_config_file_only}" - ".spiffworkflow.org" + f"https://connector-proxy.{environment_identifier_for_this_config_file_only}.spiffworkflow.org" ) SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL = environ.get( "SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL", diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/unit_testing.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/unit_testing.py index e486fe76..de94d79f 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/unit_testing.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/unit_testing.py @@ -4,17 +4,13 @@ from os import environ TESTING = True SECRET_KEY = "the_secret_key" -SPIFFWORKFLOW_BACKEND_LOG_TO_FILE = ( - environ.get("SPIFFWORKFLOW_BACKEND_LOG_TO_FILE", default="true") == "true" -) +SPIFFWORKFLOW_BACKEND_LOG_TO_FILE = environ.get("SPIFFWORKFLOW_BACKEND_LOG_TO_FILE", default="true") == "true" SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get( "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", default="unit_testing.yml" ) -SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get( - "SPIFFWORKFLOW_BACKEND_LOG_LEVEL", default="debug" -) +SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get("SPIFFWORKFLOW_BACKEND_LOG_LEVEL", default="debug") SPIFFWORKFLOW_BACKEND_GIT_COMMIT_ON_SAVE = False # NOTE: set this here since nox shoves tests and src code to diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py b/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py index ca8c5125..f6b2d391 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py @@ -202,20 +202,13 @@ def handle_exception(exception: Exception) -> flask.wrappers.Response: if isinstance(exception, ApiError): current_app.logger.info( - f"Sending ApiError exception to sentry: {exception} with error code" - f" {exception.error_code}" + f"Sending ApiError exception to sentry: {exception} with error code {exception.error_code}" ) - organization_slug = current_app.config.get( - "SPIFFWORKFLOW_BACKEND_SENTRY_ORGANIZATION_SLUG" - ) - project_slug = current_app.config.get( - "SPIFFWORKFLOW_BACKEND_SENTRY_PROJECT_SLUG" - ) + organization_slug = current_app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_ORGANIZATION_SLUG") + project_slug = current_app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_PROJECT_SLUG") if organization_slug and project_slug: - sentry_link = ( - f"https://sentry.io/{organization_slug}/{project_slug}/events/{id}" - ) + sentry_link = f"https://sentry.io/{organization_slug}/{project_slug}/events/{id}" # !!!NOTE!!!: do this after sentry stuff since calling logger.exception # seems to break the sentry sdk context where we no longer get back diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py index f7e301e4..1eaf200d 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py @@ -18,9 +18,7 @@ class BpmnProcessModel(SpiffworkflowBaseDBModel): id: int = db.Column(db.Integer, primary_key=True) guid: str | None = db.Column(db.String(36), nullable=True, unique=True, index=True) - 
parent_process_id: int | None = db.Column( - ForeignKey("bpmn_process.id"), nullable=True - ) + parent_process_id: int | None = db.Column(ForeignKey("bpmn_process.id"), nullable=True) properties_json: dict = db.Column(db.JSON, nullable=False) json_data_hash: str = db.Column(db.String(255), nullable=False, index=True) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/db.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/db.py index 5028ad1d..a91ab83e 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/db.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/db.py @@ -39,16 +39,12 @@ class SpiffworkflowBaseDBModel(db.Model): # type: ignore children.append(subclass) return result - def validate_enum_field( - self, key: str, value: Any, enum_variable: enum.EnumMeta - ) -> Any: + def validate_enum_field(self, key: str, value: Any, enum_variable: enum.EnumMeta) -> Any: """Validate_enum_field.""" try: m_type = getattr(enum_variable, value, None) except Exception as e: - raise ValueError( - f"{self.__class__.__name__}: invalid {key}: {value}" - ) from e + raise ValueError(f"{self.__class__.__name__}: invalid {key}: {value}") from e if m_type is None: raise ValueError(f"{self.__class__.__name__}: invalid {key}: {value}") diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/file.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/file.py index 449c145d..eb8d706d 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/file.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/file.py @@ -126,6 +126,4 @@ class FileSchema(Schema): "process_model_id", ] unknown = INCLUDE - references = marshmallow.fields.List( - marshmallow.fields.Nested("SpecReferenceSchema") - ) + references = marshmallow.fields.List(marshmallow.fields.Nested("SpecReferenceSchema")) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/group.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/group.py index f1017df9..8cd04681 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/group.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/group.py @@ -30,9 +30,7 @@ class GroupModel(SpiffworkflowBaseDBModel): identifier = db.Column(db.String(255)) user_group_assignments = relationship("UserGroupAssignmentModel", cascade="delete") - user_group_assignments_waiting = relationship( # type: ignore - "UserGroupAssignmentWaitingModel", cascade="delete" - ) + user_group_assignments_waiting = relationship("UserGroupAssignmentWaitingModel", cascade="delete") # type: ignore users = relationship( # type: ignore "UserModel", viewonly=True, diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task.py index 6e5a3a69..e1ecd1d1 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task.py @@ -28,15 +28,11 @@ class HumanTaskModel(SpiffworkflowBaseDBModel): __tablename__ = "human_task" id: int = db.Column(db.Integer, primary_key=True) - process_instance_id: int = db.Column( - ForeignKey(ProcessInstanceModel.id), nullable=False # type: ignore - ) + process_instance_id: int = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=False) # type: ignore lane_assignment_id: int | None = db.Column(ForeignKey(GroupModel.id)) completed_by_user_id: int = db.Column(ForeignKey(UserModel.id), nullable=True) # type: ignore - completed_by_user 
= relationship( - "UserModel", foreign_keys=[completed_by_user_id], viewonly=True - ) + completed_by_user = relationship("UserModel", foreign_keys=[completed_by_user_id], viewonly=True) actual_owner_id: int = db.Column(ForeignKey(UserModel.id)) # type: ignore # actual_owner: RelationshipProperty[UserModel] = relationship(UserModel) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task_user.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task_user.py index 1e483177..b2219bf4 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task_user.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task_user.py @@ -27,9 +27,7 @@ class HumanTaskUserModel(SpiffworkflowBaseDBModel): ) id = db.Column(db.Integer, primary_key=True) - human_task_id = db.Column( - ForeignKey(HumanTaskModel.id), nullable=False, index=True # type: ignore - ) + human_task_id = db.Column(ForeignKey(HumanTaskModel.id), nullable=False, index=True) # type: ignore user_id = db.Column(ForeignKey(UserModel.id), nullable=False, index=True) # type: ignore human_task = relationship(HumanTaskModel) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/json_data.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/json_data.py index 95993e2e..3253997a 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/json_data.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/json_data.py @@ -34,13 +34,9 @@ class JsonDataModel(SpiffworkflowBaseDBModel): @classmethod def find_object_by_hash(cls, hash: str) -> JsonDataModel: - json_data_model: JsonDataModel | None = JsonDataModel.query.filter_by( - hash=hash - ).first() + json_data_model: JsonDataModel | None = JsonDataModel.query.filter_by(hash=hash).first() if json_data_model is None: - raise JsonDataModelNotFoundError( - f"Could not find a json data model entry with hash: {hash}" - ) + raise JsonDataModelNotFoundError(f"Could not find a json data model entry with hash: {hash}") return json_data_model @classmethod diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/message_instance.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/message_instance.py index 9cf4ad98..31de7cd4 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/message_instance.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/message_instance.py @@ -63,9 +63,7 @@ class MessageInstanceModel(SpiffworkflowBaseDBModel): failure_cause: str = db.Column(db.Text()) updated_at_in_seconds: int = db.Column(db.Integer) created_at_in_seconds: int = db.Column(db.Integer) - correlation_rules = relationship( - "MessageInstanceCorrelationRuleModel", back_populates="message_instance" - ) + correlation_rules = relationship("MessageInstanceCorrelationRuleModel", back_populates="message_instance") @validates("message_type") def validate_message_type(self, key: str, value: Any) -> Any: @@ -94,10 +92,7 @@ class MessageInstanceModel(SpiffworkflowBaseDBModel): return False if not self.is_receive(): return False - if ( - isinstance(self.correlation_keys, dict) - and self.correlation_keys == other.correlation_keys - ): + if isinstance(self.correlation_keys, dict) and self.correlation_keys == other.correlation_keys: # We know we have a match, and we can just return if we don't have to figure out the key return True @@ -107,9 +102,7 @@ class MessageInstanceModel(SpiffworkflowBaseDBModel): # Loop over the receives' correlation keys - if any of the keys fully match, then we match. 
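To make the correlation check described in the comment above concrete, here is a minimal, self-contained sketch; it stands in for the real expression engine with a plain eval over the payload, and the names payload_matches and rules are illustrative assumptions, not identifiers from this codebase:

def payload_matches(payload: dict, rules: list[tuple[str, str]], expected_values: dict) -> bool:
    # Each rule is a (name, retrieval_expression) pair, shaped like the
    # name/retrieval_expression columns on MessageInstanceCorrelationRuleModel.
    for name, retrieval_expression in rules:
        expected = expected_values.get(name)
        if expected is None:  # this key is not required for a match
            continue
        try:
            # stand-in for expression_engine._evaluate(retrieval_expression, payload)
            result = eval(retrieval_expression, {}, payload)
        except Exception:
            return False  # a failed evaluation means "no match", not a hard error
        if result != expected:
            return False
    return True

# A receive message expecting invoice_id == 123 matches this send payload:
print(payload_matches({"invoice": {"id": 123}}, [("invoice_id", "invoice['id']")], {"invoice_id": 123}))  # True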
for expected_values in self.correlation_keys.values(): - if self.payload_matches_expected_values( - other.payload, expected_values, expression_engine - ) + if self.payload_matches_expected_values(other.payload, expected_values, expression_engine): return True return False @@ -128,23 +121,17 @@ class MessageInstanceModel(SpiffworkflowBaseDBModel): """Compares the payload of a 'send' message against a single correlation key's expected values.""" for correlation_key in self.correlation_rules: expected_value = expected_values.get(correlation_key.name, None) - if ( - expected_value is None - ): # This key is not required for this instance to match. + if expected_value is None: # This key is not required for this instance to match. continue try: - result = expression_engine._evaluate( - correlation_key.retrieval_expression, payload - ) + result = expression_engine._evaluate(correlation_key.retrieval_expression, payload) except Exception as e: # the failure of a payload evaluation may not mean that matches for these # message instances can't happen with other messages. So don't error up. # fixme: Perhaps log some sort of error. current_app.logger.warning( - "Error evaluating correlation key when comparing send and receive" - " messages." - + f"Expression {correlation_key.retrieval_expression} failed with" - " the error " + "Error evaluating correlation key when comparing send and receive messages." + + f" Expression {correlation_key.retrieval_expression} failed with the error " + str(e) ) return False @@ -168,7 +155,4 @@ def ensure_failure_cause_is_set_if_message_instance_failed( for instance in session.new: if isinstance(instance, MessageInstanceModel): if instance.status == "failed" and instance.failure_cause is None: - raise ValueError( - f"{instance.__class__.__name__}: failure_cause must be set if" - " status is failed" - ) + raise ValueError(f"{instance.__class__.__name__}: failure_cause must be set if status is failed") diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/message_instance_correlation.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/message_instance_correlation.py index 7431a273..92ab8f14 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/message_instance_correlation.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/message_instance_correlation.py @@ -29,13 +29,9 @@ class MessageInstanceCorrelationRuleModel(SpiffworkflowBaseDBModel): ) id = db.Column(db.Integer, primary_key=True) - message_instance_id = db.Column( - ForeignKey(MessageInstanceModel.id), nullable=False, index=True # type: ignore - ) + message_instance_id = db.Column(ForeignKey(MessageInstanceModel.id), nullable=False, index=True) # type: ignore name: str = db.Column(db.String(50), nullable=False) retrieval_expression: str = db.Column(db.String(255)) updated_at_in_seconds: int = db.Column(db.Integer) created_at_in_seconds: int = db.Column(db.Integer) - message_instance = relationship( - "MessageInstanceModel", back_populates="correlation_rules" - ) + message_instance = relationship("MessageInstanceModel", back_populates="correlation_rules") diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/permission_assignment.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/permission_assignment.py index a9db96cf..01d4b935 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/permission_assignment.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/permission_assignment.py @@ -47,9 +47,7 @@ class
PermissionAssignmentModel(SpiffworkflowBaseDBModel): ) id = db.Column(db.Integer, primary_key=True) principal_id = db.Column(ForeignKey(PrincipalModel.id), nullable=False) - permission_target_id = db.Column( - ForeignKey(PermissionTargetModel.id), nullable=False # type: ignore - ) + permission_target_id = db.Column(ForeignKey(PermissionTargetModel.id), nullable=False) # type: ignore grant_type = db.Column(db.String(50), nullable=False) permission = db.Column(db.String(50), nullable=False) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/permission_target.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/permission_target.py index 773833a3..35a81d59 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/permission_target.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/permission_target.py @@ -35,7 +35,5 @@ class PermissionTargetModel(SpiffworkflowBaseDBModel): def validate_uri(self, key: str, value: str) -> str: """Validate_uri.""" if re.search(r"%.", value): - raise InvalidPermissionTargetUriError( - f"Wildcard must appear at end: {value}" - ) + raise InvalidPermissionTargetUriError(f"Wildcard must appear at end: {value}") return value diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_group.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_group.py index 63c851a5..eb06116c 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_group.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_group.py @@ -26,9 +26,7 @@ class ProcessGroup: description: str | None = None display_order: int | None = 0 admin: bool | None = False - process_models: list[ProcessModelInfo] = field( - default_factory=list[ProcessModelInfo] - ) + process_models: list[ProcessModelInfo] = field(default_factory=list[ProcessModelInfo]) process_groups: list[ProcessGroup] = field(default_factory=list["ProcessGroup"]) parent_groups: list[ProcessGroupLite] | None = None @@ -74,17 +72,13 @@ class ProcessGroupSchema(Schema): ] process_models = marshmallow.fields.List( - marshmallow.fields.Nested( - "ProcessModelInfoSchema", dump_only=True, required=False - ) + marshmallow.fields.Nested("ProcessModelInfoSchema", dump_only=True, required=False) ) process_groups = marshmallow.fields.List( marshmallow.fields.Nested("ProcessGroupSchema", dump_only=True, required=False) ) @post_load - def make_process_group( - self, data: dict[str, str | bool | int], **kwargs: dict - ) -> ProcessGroup: + def make_process_group(self, data: dict[str, str | bool | int], **kwargs: dict) -> ProcessGroup: """Make_process_group.""" return ProcessGroup(**data) # type: ignore diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py index dc66c86f..6f1ec1b6 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py @@ -55,12 +55,8 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel): __tablename__ = "process_instance" id: int = db.Column(db.Integer, primary_key=True) - process_model_identifier: str = db.Column( - db.String(255), nullable=False, index=True - ) - process_model_display_name: str = db.Column( - db.String(255), nullable=False, index=True - ) + process_model_identifier: str = db.Column(db.String(255), nullable=False, index=True) + process_model_display_name: str = db.Column(db.String(255), 
nullable=False, index=True) process_initiator_id: int = db.Column(ForeignKey(UserModel.id), nullable=False) # type: ignore process_initiator = relationship("UserModel") @@ -68,9 +64,7 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel): ForeignKey(BpmnProcessDefinitionModel.id), nullable=True # type: ignore ) bpmn_process_definition = relationship(BpmnProcessDefinitionModel) - bpmn_process_id: int | None = db.Column( - ForeignKey(BpmnProcessModel.id), nullable=True # type: ignore - ) + bpmn_process_id: int | None = db.Column(ForeignKey(BpmnProcessModel.id), nullable=True) # type: ignore bpmn_process = relationship(BpmnProcessModel, cascade="delete") tasks = relationship("TaskModel", cascade="delete") # type: ignore @@ -79,8 +73,7 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel): active_human_tasks = relationship( "HumanTaskModel", primaryjoin=( - "and_(HumanTaskModel.process_instance_id==ProcessInstanceModel.id," - " HumanTaskModel.completed == False)" + "and_(HumanTaskModel.process_instance_id==ProcessInstanceModel.id, HumanTaskModel.completed == False)" ), ) # type: ignore @@ -242,9 +235,7 @@ class ProcessInstanceApiSchema(Schema): next_task = marshmallow.fields.Nested(TaskSchema, dump_only=True, required=False) @marshmallow.post_load - def make_process_instance( - self, data: dict[str, Any], **kwargs: dict - ) -> ProcessInstanceApi: + def make_process_instance(self, data: dict[str, Any], **kwargs: dict) -> ProcessInstanceApi: """Make_process_instance.""" keys = [ "id", diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_file_data.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_file_data.py index e0d5dcb8..5d3567ad 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_file_data.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_file_data.py @@ -17,17 +17,13 @@ class ProcessInstanceFileDataModel(SpiffworkflowBaseDBModel): __tablename__ = "process_instance_file_data" id: int = db.Column(db.Integer, primary_key=True) - process_instance_id: int = db.Column( - ForeignKey(ProcessInstanceModel.id), nullable=False # type: ignore - ) + process_instance_id: int = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=False) # type: ignore identifier: str = db.Column(db.String(255), nullable=False) list_index: Optional[int] = db.Column(db.Integer, nullable=True) mimetype: str = db.Column(db.String(255), nullable=False) filename: str = db.Column(db.String(255), nullable=False) # this is not deferred because there is no reason to query this model if you do not want the contents - contents: str = db.Column( - db.LargeBinary().with_variant(LONGBLOB, "mysql"), nullable=False - ) + contents: str = db.Column(db.LargeBinary().with_variant(LONGBLOB, "mysql"), nullable=False) digest: str = db.Column(db.String(64), nullable=False, index=True) updated_at_in_seconds: int = db.Column(db.Integer, nullable=False) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_metadata.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_metadata.py index 920e13a2..b5e88ff8 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_metadata.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_metadata.py @@ -13,16 +13,10 @@ class ProcessInstanceMetadataModel(SpiffworkflowBaseDBModel): """ProcessInstanceMetadataModel.""" __tablename__ = "process_instance_metadata" - __table_args__ = ( - 
db.UniqueConstraint( - "process_instance_id", "key", name="process_instance_metadata_unique" - ), - ) + __table_args__ = (db.UniqueConstraint("process_instance_id", "key", name="process_instance_metadata_unique"),) id: int = db.Column(db.Integer, primary_key=True) - process_instance_id: int = db.Column( - ForeignKey(ProcessInstanceModel.id), nullable=False # type: ignore - ) + process_instance_id: int = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=False) # type: ignore key: str = db.Column(db.String(255), nullable=False, index=True) value: str = db.Column(db.String(255), nullable=False) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_queue.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_queue.py index ff81cf86..c0cb9f27 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_queue.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_queue.py @@ -22,9 +22,7 @@ class ProcessInstanceQueueModel(SpiffworkflowBaseDBModel): run_at_in_seconds: int = db.Column(db.Integer) priority: int = db.Column(db.Integer) locked_by: Union[str, None] = db.Column(db.String(80), index=True, nullable=True) - locked_at_in_seconds: Union[int, None] = db.Column( - db.Integer, index=True, nullable=True - ) + locked_at_in_seconds: Union[int, None] = db.Column(db.Integer, index=True, nullable=True) status: str = db.Column(db.String(50), index=True) updated_at_in_seconds: int = db.Column(db.Integer) created_at_in_seconds: int = db.Column(db.Integer) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_report.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_report.py index a8787da6..ade1f60d 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_report.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_report.py @@ -187,9 +187,7 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel): {"Header": "priority", "accessor": "priority"}, ], "order": "month asc", - "filter_by": [ - {"field_name": "month", "operator": "equals", "field_value": "3"} - ], + "filter_by": [{"field_name": "month", "operator": "equals", "field_value": "3"}], } @classmethod @@ -233,25 +231,19 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel): if substitution_variables is not None: for key, value in substitution_variables.items(): if isinstance(value, str) or isinstance(value, int): - field_value = str(field_value).replace( - "{{" + key + "}}", str(value) - ) + field_value = str(field_value).replace("{{" + key + "}}", str(value)) return field_value # modeled after https://github.com/suyash248/sqlalchemy-json-querybuilder # just supports "equals" operator for now. # perhaps we will use the database instead of filtering in memory in the future and then we might use this lib directly. 
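As a concrete illustration of the two behaviors the comments above describe, the "{{key}}" substitution and the in-memory "equals" filter, here is a small runnable sketch; the standalone function names are assumptions for illustration, not the model's actual API:

def with_substitutions(field_value, substitution_variables):
    # Replace "{{key}}" tokens in the configured field value.
    if substitution_variables is not None:
        for key, value in substitution_variables.items():
            if isinstance(value, (str, int)):
                field_value = str(field_value).replace("{{" + key + "}}", str(value))
    return field_value

def passes_equals_filter(process_instance_dict, filter_by, substitution_variables):
    # Only the "equals" operator is supported, per the comment above.
    for item in filter_by:
        if item["operator"] != "equals":
            continue
        field_value = with_substitutions(item["field_value"], substitution_variables)
        if str(process_instance_dict.get(item["field_name"])) != str(field_value):
            return False
    return True

filter_by = [{"field_name": "month", "operator": "equals", "field_value": "{{month}}"}]
print(passes_equals_filter({"month": "3"}, filter_by, {"month": 3}))  # True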
- def passes_filter( - self, process_instance_dict: dict, substitution_variables: dict - ) -> bool: + def passes_filter(self, process_instance_dict: dict, substitution_variables: dict) -> bool: """Passes_filter.""" if "filter_by" in self.report_metadata: for filter_by in self.report_metadata["filter_by"]: field_name = filter_by["field_name"] operator = filter_by["operator"] - field_value = self.with_substitutions( - filter_by["field_value"], substitution_variables - ) + field_value = self.with_substitutions(filter_by["field_value"], substitution_variables) if operator == "equals": if str(process_instance_dict.get(field_name)) != str(field_value): return False @@ -274,9 +266,7 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel): sort_value = process_instance_dict.get(order_by_item) comparison_values.append(Reversor(sort_value)) else: - sort_value = cast( - Optional[str], process_instance_dict.get(order_by_item) - ) + sort_value = cast(Optional[str], process_instance_dict.get(order_by_item)) comparison_values.append(sort_value) return comparison_values @@ -307,20 +297,14 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel): results = self.order_things(results) if "columns" in self.report_metadata: - column_keys_to_keep = [ - c["accessor"] for c in self.report_metadata["columns"] - ] + column_keys_to_keep = [c["accessor"] for c in self.report_metadata["columns"]] pruned_results = [] for result in results: dict_you_want = { - your_key: result[your_key] - for your_key in column_keys_to_keep - if result.get(your_key) + your_key: result[your_key] for your_key in column_keys_to_keep if result.get(your_key) } pruned_results.append(dict_you_want) results = pruned_results - return ProcessInstanceReportResult( - report_metadata=self.report_metadata, results=results - ) + return ProcessInstanceReportResult(report_metadata=self.report_metadata, results=results) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_model.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_model.py index 8ae6595c..c1f57fbb 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_model.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_model.py @@ -89,9 +89,7 @@ class ProcessModelInfoSchema(Schema): primary_process_id = marshmallow.fields.String(allow_none=True) files = marshmallow.fields.List(marshmallow.fields.Nested("FileSchema")) fault_or_suspend_on_exception = marshmallow.fields.String() - exception_notification_addresses = marshmallow.fields.List( - marshmallow.fields.String - ) + exception_notification_addresses = marshmallow.fields.List(marshmallow.fields.String) metadata_extraction_paths = marshmallow.fields.List( marshmallow.fields.Dict( keys=marshmallow.fields.Str(required=False), @@ -101,8 +99,6 @@ class ProcessModelInfoSchema(Schema): ) @post_load - def make_spec( - self, data: dict[str, str | bool | int | NotificationType], **_: Any - ) -> ProcessModelInfo: + def make_spec(self, data: dict[str, str | bool | int | NotificationType], **_: Any) -> ProcessModelInfo: """Make_spec.""" return ProcessModelInfo(**data) # type: ignore diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/spec_reference.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/spec_reference.py index 090cf70a..cfc78686 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/spec_reference.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/spec_reference.py @@ -41,9 +41,7 @@ class 
SpecReferenceCache(SpiffworkflowBaseDBModel): """A cache of information about all the Processes and Decisions defined in all files.""" __tablename__ = "spec_reference_cache" - __table_args__ = ( - UniqueConstraint("identifier", "type", name="_identifier_type_unique"), - ) + __table_args__ = (UniqueConstraint("identifier", "type", name="_identifier_type_unique"),) id = db.Column(db.Integer, primary_key=True) identifier = db.Column(db.String(255), index=True) display_name = db.Column(db.String(255), index=True) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py index 713bd3cd..beed8da7 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py @@ -16,16 +16,10 @@ class SpiffStepDetailsModel(SpiffworkflowBaseDBModel): """SpiffStepDetailsModel.""" __tablename__ = "spiff_step_details" - __table_args__ = ( - UniqueConstraint( - "process_instance_id", "spiff_step", name="process_instance_id_spiff_step" - ), - ) + __table_args__ = (UniqueConstraint("process_instance_id", "spiff_step", name="process_instance_id_spiff_step"),) id: int = db.Column(db.Integer, primary_key=True) - process_instance_id: int = db.Column( - ForeignKey(ProcessInstanceModel.id), nullable=False # type: ignore - ) + process_instance_id: int = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=False) # type: ignore spiff_step: int = db.Column(db.Integer, nullable=False) task_json: dict = deferred(db.Column(db.JSON, nullable=False)) # type: ignore task_id: str = db.Column(db.String(50), nullable=False) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py index fc0d3262..75320e4d 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py @@ -45,17 +45,11 @@ class TaskModel(SpiffworkflowBaseDBModel): __tablename__ = "task" id: int = db.Column(db.Integer, primary_key=True) guid: str = db.Column(db.String(36), nullable=False, unique=True, index=True) - bpmn_process_id: int = db.Column( - ForeignKey(BpmnProcessModel.id), nullable=False # type: ignore - ) - process_instance_id: int = db.Column( - ForeignKey("process_instance.id"), nullable=False - ) + bpmn_process_id: int = db.Column(ForeignKey(BpmnProcessModel.id), nullable=False) # type: ignore + process_instance_id: int = db.Column(ForeignKey("process_instance.id"), nullable=False) # find this by looking up the "workflow_name" and "task_spec" from the properties_json - task_definition_id: int = db.Column( - ForeignKey(TaskDefinitionModel.id), nullable=False # type: ignore - ) + task_definition_id: int = db.Column(ForeignKey(TaskDefinitionModel.id), nullable=False) # type: ignore task_definition = relationship("TaskDefinitionModel") state: str = db.Column(db.String(10), nullable=False) @@ -137,15 +131,9 @@ class Task: self.form_schema = form_schema self.form_ui_schema = form_ui_schema - self.multi_instance_type = ( - multi_instance_type # Some tasks have a repeat behavior. - ) - self.multi_instance_count = ( - multi_instance_count # This is the number of times the task could repeat. - ) - self.multi_instance_index = ( - multi_instance_index # And the index of the currently repeating task. - ) + self.multi_instance_type = multi_instance_type # Some tasks have a repeat behavior. 
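# Illustrative values for the three multi-instance fields set here (a sketch,
# assuming a sequential multi-instance task that loops over three approvers):
#   multi_instance_type = "sequential"  # the repeat behavior of the task
#   multi_instance_count = 3            # how many times the task could repeat
#   multi_instance_index = 2            # the index of the currently repeating task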
+ self.multi_instance_count = multi_instance_count # This is the number of times the task could repeat. + self.multi_instance_index = multi_instance_index # And the index of the currently repeating task. self.process_identifier = process_identifier self.properties = properties # Arbitrary extension properties from BPMN editor. @@ -243,9 +231,7 @@ class FormFieldSchema(Schema): default_value = marshmallow.fields.String(required=False, allow_none=True) options = marshmallow.fields.List(marshmallow.fields.Nested(OptionSchema)) validation = marshmallow.fields.List(marshmallow.fields.Nested(ValidationSchema)) - properties = marshmallow.fields.List( - marshmallow.fields.Nested(FormFieldPropertySchema) - ) + properties = marshmallow.fields.List(marshmallow.fields.Nested(FormFieldPropertySchema)) # class FormSchema(Schema): diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py index f32a35d7..4b55e8b6 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py @@ -29,9 +29,7 @@ class UserModel(SpiffworkflowBaseDBModel): id: int = db.Column(db.Integer, primary_key=True) username: str = db.Column(db.String(255), nullable=False, unique=True) - service = db.Column( - db.String(255), nullable=False, unique=False - ) # not 'openid' -- google, aws + service = db.Column(db.String(255), nullable=False, unique=False) # not 'openid' -- google, aws service_id = db.Column(db.String(255), nullable=False, unique=False) display_name = db.Column(db.String(255)) email = db.Column(db.String(255)) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/user_group_assignment.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/user_group_assignment.py index acd6c30b..45467a81 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/user_group_assignment.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/user_group_assignment.py @@ -12,9 +12,7 @@ class UserGroupAssignmentModel(SpiffworkflowBaseDBModel): """UserGroupAssignmentModel.""" __tablename__ = "user_group_assignment" - __table_args__ = ( - db.UniqueConstraint("user_id", "group_id", name="user_group_assignment_unique"), - ) + __table_args__ = (db.UniqueConstraint("user_id", "group_id", name="user_group_assignment_unique"),) id = db.Column(db.Integer, primary_key=True) user_id = db.Column(ForeignKey(UserModel.id), nullable=False) # type: ignore diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/user_group_assignment_waiting.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/user_group_assignment_waiting.py index 7db1676f..5616728b 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/user_group_assignment_waiting.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/user_group_assignment_waiting.py @@ -15,11 +15,7 @@ class UserGroupAssignmentWaitingModel(SpiffworkflowBaseDBModel): MATCH_ALL_USERS = "*" __tablename__ = "user_group_assignment_waiting" - __table_args__ = ( - db.UniqueConstraint( - "username", "group_id", name="user_group_assignment_staged_unique" - ), - ) + __table_args__ = (db.UniqueConstraint("username", "group_id", name="user_group_assignment_staged_unique"),) id = db.Column(db.Integer, primary_key=True) username = db.Column(db.String(255), nullable=False) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/messages_controller.py 
b/spiffworkflow-backend/src/spiffworkflow_backend/routes/messages_controller.py index 1c86fddb..7cd65a37 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/messages_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/messages_controller.py @@ -28,9 +28,7 @@ def message_instance_list( message_instances_query = MessageInstanceModel.query if process_instance_id: - message_instances_query = message_instances_query.filter_by( - process_instance_id=process_instance_id - ) + message_instances_query = message_instances_query.filter_by(process_instance_id=process_instance_id) message_instances = ( message_instances_query.order_by( @@ -70,10 +68,7 @@ def message_send( raise ( ApiError( error_code="missing_payload", - message=( - "Please include a 'payload' in the JSON body that contains the" - " message contents." - ), + message="Please include a 'payload' in the JSON body that contains the message contents.", status_code=400, ) ) @@ -111,9 +106,7 @@ def message_send( ) ) - process_instance = ProcessInstanceModel.query.filter_by( - id=receiver_message.process_instance_id - ).first() + process_instance = ProcessInstanceModel.query.filter_by(id=receiver_message.process_instance_id).first() return Response( json.dumps(ProcessInstanceModelSchema().dump(process_instance)), status=200, diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py index f25100ee..08be9ff1 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py @@ -20,9 +20,7 @@ from flask import request from flask import url_for from werkzeug.wrappers import Response -openid_blueprint = Blueprint( - "openid", __name__, template_folder="templates", static_folder="static" -) +openid_blueprint = Blueprint("openid", __name__, template_folder="templates", static_folder="static") OPEN_ID_CODE = ":this_is_not_secure_do_not_use_in_production" @@ -60,10 +58,7 @@ def auth() -> str: def form_submit() -> Any: """Handles the login form submission.""" users = get_users() - if ( - request.values["Uname"] in users - and request.values["Pass"] == users[request.values["Uname"]]["password"] - ): + if request.values["Uname"] in users and request.values["Pass"] == users[request.values["Uname"]]["password"]: # Redirect back to the end user with some detailed information state = request.values.get("state") data = { diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py index 81272270..32becbc6 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py @@ -46,9 +46,7 @@ def permissions_check(body: Dict[str, Dict[str, list[str]]]) -> flask.wrappers.R raise ( ApiError( error_code="could_not_requests_to_check", - message=( - "The key 'requests_to_check' not found at root of request body." 
- ), + message="The key 'requests_to_check' not found at root of request body.", status_code=400, ) ) @@ -60,9 +58,7 @@ def permissions_check(body: Dict[str, Dict[str, list[str]]]) -> flask.wrappers.R response_dict[target_uri] = {} for http_method in http_methods: - permission_string = AuthorizationService.get_permission_from_http_method( - http_method - ) + permission_string = AuthorizationService.get_permission_from_http_method(http_method) if permission_string: has_permission = AuthorizationService.user_has_permission( user=g.user, @@ -98,10 +94,7 @@ def _process_data_fetcher( if file_data is None: raise ApiError( error_code="process_instance_file_data_not_found", - message=( - "Could not find file data related to the digest:" - f" {process_data_identifier}" - ), + message=f"Could not find file data related to the digest: {process_data_identifier}", ) mimetype = file_data.mimetype filename = file_data.filename @@ -169,9 +162,7 @@ def github_webhook_receive(body: Dict) -> Response: auth_header = request.headers.get("X-Hub-Signature-256") AuthorizationService.verify_sha256_token(auth_header) result = GitService.handle_web_hook(body) - return Response( - json.dumps({"git_pull": result}), status=200, mimetype="application/json" - ) + return Response(json.dumps({"git_pull": result}), status=200, mimetype="application/json") def task_data_update( @@ -181,9 +172,7 @@ def task_data_update( body: Dict, ) -> Response: """Update task data.""" - process_instance = ProcessInstanceModel.query.filter( - ProcessInstanceModel.id == int(process_instance_id) - ).first() + process_instance = ProcessInstanceModel.query.filter(ProcessInstanceModel.id == int(process_instance_id)).first() if process_instance: if process_instance.status != "suspended": raise ProcessInstanceTaskDataCannotBeUpdatedError( @@ -195,10 +184,7 @@ def task_data_update( if process_instance_data is None: raise ApiError( error_code="process_instance_data_not_found", - message=( - "Could not find task data related to process instance:" - f" {process_instance.id}" - ), + message=f"Could not find task data related to process instance: {process_instance.id}", ) process_instance_data_dict = json.loads(process_instance_data.runtime_json) @@ -206,12 +192,8 @@ def task_data_update( new_task_data_str: str = body["new_task_data"] new_task_data_dict = json.loads(new_task_data_str) if task_id in process_instance_data_dict["tasks"]: - process_instance_data_dict["tasks"][task_id][ - "data" - ] = new_task_data_dict - process_instance_data.runtime_json = json.dumps( - process_instance_data_dict - ) + process_instance_data_dict["tasks"][task_id]["data"] = new_task_data_dict + process_instance_data.runtime_json = json.dumps(process_instance_data_dict) db.session.add(process_instance_data) try: db.session.commit() @@ -224,18 +206,12 @@ def task_data_update( else: raise ApiError( error_code="update_task_data_error", - message=( - f"Could not find Task: {task_id} in Instance:" - f" {process_instance_id}." - ), + message=f"Could not find Task: {task_id} in Instance: {process_instance_id}.", ) else: raise ApiError( error_code="update_task_data_error", - message=( - f"Could not update task data for Instance: {process_instance_id}, and" - f" Task: {task_id}." 
- ), + message=f"Could not update task data for Instance: {process_instance_id}, and Task: {task_id}.", ) return Response( json.dumps(ProcessInstanceModelSchema().dump(process_instance)), @@ -268,9 +244,7 @@ def send_bpmn_event( body: Dict, ) -> Response: """Send a bpmn event to a workflow.""" - process_instance = ProcessInstanceModel.query.filter( - ProcessInstanceModel.id == int(process_instance_id) - ).first() + process_instance = ProcessInstanceModel.query.filter(ProcessInstanceModel.id == int(process_instance_id)).first() if process_instance: processor = ProcessInstanceProcessor(process_instance) processor.send_bpmn_event(body) @@ -294,18 +268,14 @@ def manual_complete_task( ) -> Response: """Mark a task complete without executing it.""" execute = body.get("execute", True) - process_instance = ProcessInstanceModel.query.filter( - ProcessInstanceModel.id == int(process_instance_id) - ).first() + process_instance = ProcessInstanceModel.query.filter(ProcessInstanceModel.id == int(process_instance_id)).first() if process_instance: processor = ProcessInstanceProcessor(process_instance) processor.manual_complete_task(task_id, execute) else: raise ApiError( error_code="complete_task", - message=( - f"Could not complete Task {task_id} in Instance {process_instance_id}" - ), + message=f"Could not complete Task {task_id} in Instance {process_instance_id}", ) return Response( json.dumps(ProcessInstanceModelSchema().dump(process_instance)), @@ -332,9 +302,7 @@ def _find_process_instance_by_id_or_raise( process_instance_id: int, ) -> ProcessInstanceModel: """Find_process_instance_by_id_or_raise.""" - process_instance_query = ProcessInstanceModel.query.filter_by( - id=process_instance_id - ) + process_instance_query = ProcessInstanceModel.query.filter_by(id=process_instance_id) # we had a frustrating session trying to do joins and access columns from two tables. 
here's some notes for our future selves: # this returns an object that allows you to do: process_instance.UserModel.username diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_groups_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_groups_controller.py index 472e0358..114d327e 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_groups_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_groups_controller.py @@ -44,9 +44,7 @@ def process_group_create(body: dict) -> flask.wrappers.Response: ) ProcessModelService.add_process_group(process_group) - _commit_and_push_to_git( - f"User: {g.user.username} added process group {process_group.id}" - ) + _commit_and_push_to_git(f"User: {g.user.username} added process group {process_group.id}") return make_response(jsonify(process_group), 201) @@ -63,22 +61,14 @@ def process_group_delete(modified_process_group_id: str) -> flask.wrappers.Respo status_code=400, ) from exception - _commit_and_push_to_git( - f"User: {g.user.username} deleted process group {process_group_id}" - ) + _commit_and_push_to_git(f"User: {g.user.username} deleted process group {process_group_id}") return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") -def process_group_update( - modified_process_group_id: str, body: dict -) -> flask.wrappers.Response: +def process_group_update(modified_process_group_id: str, body: dict) -> flask.wrappers.Response: """Process Group Update.""" body_include_list = ["display_name", "description"] - body_filtered = { - include_item: body[include_item] - for include_item in body_include_list - if include_item in body - } + body_filtered = {include_item: body[include_item] for include_item in body_include_list if include_item in body} process_group_id = _un_modify_modified_process_model_id(modified_process_group_id) if not ProcessModelService.is_process_group_identifier(process_group_id): @@ -90,9 +80,7 @@ def process_group_update( process_group = ProcessGroup(id=process_group_id, **body_filtered) ProcessModelService.update_process_group(process_group) - _commit_and_push_to_git( - f"User: {g.user.username} updated process group {process_group_id}" - ) + _commit_and_push_to_git(f"User: {g.user.username} updated process group {process_group_id}") return make_response(jsonify(process_group), 200) @@ -101,14 +89,10 @@ def process_group_list( ) -> flask.wrappers.Response: """Process_group_list.""" if process_group_identifier is not None: - process_groups = ProcessModelService.get_process_groups( - process_group_identifier - ) + process_groups = ProcessModelService.get_process_groups(process_group_identifier) else: process_groups = ProcessModelService.get_process_groups() - batch = ProcessModelService().get_batch( - items=process_groups, page=page, per_page=per_page - ) + batch = ProcessModelService().get_batch(items=process_groups, page=page, per_page=per_page) pages = len(process_groups) // per_page remainder = len(process_groups) % per_page if remainder > 0: @@ -141,24 +125,15 @@ def process_group_show( ) ) from exception - process_group.parent_groups = ProcessModelService.get_parent_group_array( - process_group.id - ) + process_group.parent_groups = ProcessModelService.get_parent_group_array(process_group.id) return make_response(jsonify(process_group), 200) -def process_group_move( - modified_process_group_identifier: str, new_location: str -) -> flask.wrappers.Response: +def 
     """Process_group_move."""
-    original_process_group_id = _un_modify_modified_process_model_id(
-        modified_process_group_identifier
-    )
-    new_process_group = ProcessModelService().process_group_move(
-        original_process_group_id, new_location
-    )
+    original_process_group_id = _un_modify_modified_process_model_id(modified_process_group_identifier)
+    new_process_group = ProcessModelService().process_group_move(original_process_group_id, new_location)
     _commit_and_push_to_git(
-        f"User: {g.user.username} moved process group {original_process_group_id} to"
-        f" {new_process_group.id}"
+        f"User: {g.user.username} moved process group {original_process_group_id} to {new_process_group.id}"
     )
     return make_response(jsonify(new_process_group), 200)
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py
index 406ab2ef..489b710c 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py
@@ -88,9 +88,7 @@ def process_instance_create(
     modified_process_model_identifier: str,
 ) -> flask.wrappers.Response:
     """Create_process_instance."""
-    process_model_identifier = _un_modify_modified_process_model_id(
-        modified_process_model_identifier
-    )
+    process_model_identifier = _un_modify_modified_process_model_id(modified_process_model_identifier)
     process_model = _get_process_model(process_model_identifier)

     if process_model.primary_file_name is None:
@@ -103,10 +101,8 @@ def process_instance_create(
             status_code=400,
         )

-    process_instance = (
-        ProcessInstanceService.create_process_instance_from_process_model_identifier(
-            process_model_identifier, g.user
-        )
+    process_instance = ProcessInstanceService.create_process_instance_from_process_model_identifier(
+        process_model_identifier, g.user
     )
     ProcessInstanceQueueService.enqueue(process_instance)
     return Response(
@@ -126,10 +122,7 @@ def process_instance_run(
     if process_instance.status != "not_started":
         raise ApiError(
             error_code="process_instance_not_runnable",
-            message=(
-                f"Process Instance ({process_instance.id}) is currently running or has"
-                " already run."
- ), + message=f"Process Instance ({process_instance.id}) is currently running or has already run.", status_code=400, ) @@ -163,15 +156,11 @@ def process_instance_run( if not current_app.config["SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER"]: MessageService.correlate_all_message_instances() - process_instance_api = ProcessInstanceService.processor_to_process_instance_api( - processor - ) + process_instance_api = ProcessInstanceService.processor_to_process_instance_api(processor) process_instance_data = processor.get_data() process_instance_metadata = ProcessInstanceApiSchema().dump(process_instance_api) process_instance_metadata["data"] = process_instance_data - return Response( - json.dumps(process_instance_metadata), status=200, mimetype="application/json" - ) + return Response(json.dumps(process_instance_metadata), status=200, mimetype="application/json") def process_instance_terminate( @@ -216,9 +205,7 @@ def process_instance_log_list( # to make sure the process instance exists process_instance = _find_process_instance_by_id_or_raise(process_instance_id) - log_query = SpiffLoggingModel.query.filter( - SpiffLoggingModel.process_instance_id == process_instance.id - ) + log_query = SpiffLoggingModel.query.filter(SpiffLoggingModel.process_instance_id == process_instance.id) if not detailed: log_query = log_query.filter( # 1. this was the previous implementation, where we only show completed tasks and skipped tasks. @@ -231,9 +218,7 @@ def process_instance_log_list( # we decided to remove them, since they get really chatty when there are lots of subprocesses and call activities. and_( SpiffLoggingModel.message.in_(["State change to COMPLETED"]), # type: ignore - SpiffLoggingModel.bpmn_task_type.in_( # type: ignore - ["Default Throwing Event"] - ), + SpiffLoggingModel.bpmn_task_type.in_(["Default Throwing Event"]), # type: ignore ) ) @@ -317,9 +302,7 @@ def process_instance_list( report_filter_by: Optional[str] = None, ) -> flask.wrappers.Response: """Process_instance_list.""" - process_instance_report = ProcessInstanceReportService.report_with_identifier( - g.user, report_id, report_identifier - ) + process_instance_report = ProcessInstanceReportService.report_with_identifier(g.user, report_id, report_identifier) report_column_list = None if report_columns: @@ -343,21 +326,19 @@ def process_instance_list( report_filter_by_list=report_filter_by_list, ) else: - report_filter = ( - ProcessInstanceReportService.filter_from_metadata_with_overrides( - process_instance_report=process_instance_report, - process_model_identifier=process_model_identifier, - user_group_identifier=user_group_identifier, - start_from=start_from, - start_to=start_to, - end_from=end_from, - end_to=end_to, - process_status=process_status, - with_relation_to_me=with_relation_to_me, - process_initiator_username=process_initiator_username, - report_column_list=report_column_list, - report_filter_by_list=report_filter_by_list, - ) + report_filter = ProcessInstanceReportService.filter_from_metadata_with_overrides( + process_instance_report=process_instance_report, + process_model_identifier=process_model_identifier, + user_group_identifier=user_group_identifier, + start_from=start_from, + start_to=start_to, + end_from=end_from, + end_to=end_to, + process_status=process_status, + with_relation_to_me=with_relation_to_me, + process_initiator_username=process_initiator_username, + report_column_list=report_column_list, + report_filter_by_list=report_filter_by_list, ) response_json = 
@@ -381,8 +362,7 @@ def process_instance_report_column_list() -> flask.wrappers.Response:
         .all()
     )
     columns_for_metadata_strings = [
-        {"Header": i[0], "accessor": i[0], "filterable": True}
-        for i in columns_for_metadata
+        {"Header": i[0], "accessor": i[0], "filterable": True} for i in columns_for_metadata
     ]
     return make_response(jsonify(table_columns + columns_for_metadata_strings), 200)

@@ -429,23 +409,15 @@ def process_instance_delete(
     # (Pdb) db.session.delete
     # >
-    db.session.query(SpiffLoggingModel).filter_by(
-        process_instance_id=process_instance.id
-    ).delete()
-    db.session.query(SpiffStepDetailsModel).filter_by(
-        process_instance_id=process_instance.id
-    ).delete()
-    db.session.query(ProcessInstanceQueueModel).filter_by(
-        process_instance_id=process_instance.id
-    ).delete()
+    db.session.query(SpiffLoggingModel).filter_by(process_instance_id=process_instance.id).delete()
+    db.session.query(SpiffStepDetailsModel).filter_by(process_instance_id=process_instance.id).delete()
+    db.session.query(ProcessInstanceQueueModel).filter_by(process_instance_id=process_instance.id).delete()
     db.session.delete(process_instance)
     db.session.commit()
     return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")


-def process_instance_report_list(
-    page: int = 1, per_page: int = 100
-) -> flask.wrappers.Response:
+def process_instance_report_list(page: int = 1, per_page: int = 100) -> flask.wrappers.Response:
     """Process_instance_report_list."""
     process_instance_reports = ProcessInstanceReportModel.query.filter_by(
         created_by_id=g.user.id,
@@ -530,9 +502,7 @@ def process_instance_report_show(
     )

     substitution_variables = request.args.to_dict()
-    result_dict = process_instance_report.generate_report(
-        process_instances.items, substitution_variables
-    )
+    result_dict = process_instance_report.generate_report(process_instances.items, substitution_variables)

     # update this if we go back to a database query instead of filtering in memory
     result_dict["pagination"] = {
@@ -593,9 +563,7 @@ def process_instance_task_list(
     )

     if spiff_step > 0:
-        step_detail_query = step_detail_query.filter(
-            SpiffStepDetailsModel.spiff_step <= spiff_step
-        )
+        step_detail_query = step_detail_query.filter(SpiffStepDetailsModel.spiff_step <= spiff_step)

     step_details = step_detail_query.all()

@@ -619,9 +587,7 @@ def process_instance_task_list(
         for spiff_task in subprocess["tasks"].values():
             restore_task(spiff_task, last_change)

-    bpmn_process_instance = ProcessInstanceProcessor._serializer.workflow_from_dict(
-        full_bpmn_process_dict
-    )
+    bpmn_process_instance = ProcessInstanceProcessor._serializer.workflow_from_dict(full_bpmn_process_dict)
     if spiff_step > 0:
         bpmn_process_instance.complete_task_from_id(UUID(step_details[-1].task_id))
         for subprocess_id, subprocess in bpmn_process_instance.subprocesses.items():
@@ -653,8 +619,7 @@ def process_instance_task_list(
             current_tasks[row_id] = spiff_task
         if (
             row_id not in spiff_tasks_by_process_id_and_task_name
-            or spiff_task.state
-            > spiff_tasks_by_process_id_and_task_name[row_id].state
+            or spiff_task.state > spiff_tasks_by_process_id_and_task_name[row_id].state
         ):
             spiff_tasks_by_process_id_and_task_name[row_id] = spiff_task
     spiff_tasks_by_process_id_and_task_name.update(current_tasks)
@@ -665,9 +630,7 @@ def process_instance_task_list(
         task_spiff_step: Optional[int] = None
         if str(spiff_task.id) in steps_by_id:
             task_spiff_step = steps_by_id[str(spiff_task.id)].spiff_step
-        calling_subprocess_task_id = subprocesses_by_child_task_ids.get(
-            str(spiff_task.id), None
-        )
+        calling_subprocess_task_id = subprocesses_by_child_task_ids.get(str(spiff_task.id), None)
         task = ProcessInstanceService.spiff_task_to_api_task(
             processor,
             spiff_task,
@@ -698,14 +661,10 @@ def process_instance_find_by_id(
 ) -> flask.wrappers.Response:
     """Process_instance_find_by_id."""
     process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
-    modified_process_model_identifier = (
-        ProcessModelInfo.modify_process_identifier_for_path_param(
-            process_instance.process_model_identifier
-        )
-    )
-    process_instance_uri = (
-        f"/process-instances/{modified_process_model_identifier}/{process_instance.id}"
+    modified_process_model_identifier = ProcessModelInfo.modify_process_identifier_for_path_param(
+        process_instance.process_model_identifier
     )
+    process_instance_uri = f"/process-instances/{modified_process_model_identifier}/{process_instance.id}"
     has_permission = AuthorizationService.user_has_permission(
         user=g.user,
         permission="read",
@@ -739,32 +698,22 @@ def _get_process_instance(
     process_model_with_diagram = None
     name_of_file_with_diagram = None
     if process_identifier:
-        spec_reference = SpecReferenceCache.query.filter_by(
-            identifier=process_identifier, type="process"
-        ).first()
+        spec_reference = SpecReferenceCache.query.filter_by(identifier=process_identifier, type="process").first()
         if spec_reference is None:
             raise SpecReferenceNotFoundError(
-                "Could not find given process identifier in the cache:"
-                f" {process_identifier}"
+                f"Could not find given process identifier in the cache: {process_identifier}"
             )

-        process_model_with_diagram = ProcessModelService.get_process_model(
-            spec_reference.process_model_id
-        )
+        process_model_with_diagram = ProcessModelService.get_process_model(spec_reference.process_model_id)
         name_of_file_with_diagram = spec_reference.file_name
-        process_instance.process_model_with_diagram_identifier = (
-            process_model_with_diagram.id
-        )
+        process_instance.process_model_with_diagram_identifier = process_model_with_diagram.id
     else:
         process_model_with_diagram = _get_process_model(process_model_identifier)
         if process_model_with_diagram.primary_file_name:
             name_of_file_with_diagram = process_model_with_diagram.primary_file_name

     if process_model_with_diagram and name_of_file_with_diagram:
-        if (
-            process_instance.bpmn_version_control_identifier
-            == current_version_control_revision
-        ):
+        if process_instance.bpmn_version_control_identifier == current_version_control_revision:
             bpmn_xml_file_contents = SpecFileService.get_data(
                 process_model_with_diagram, name_of_file_with_diagram
             ).decode("utf-8")
@@ -807,10 +756,7 @@ def _find_process_instance_for_me_or_raise(
         raise (
             ApiError(
                 error_code="process_instance_cannot_be_found",
-                message=(
-                    f"Process instance with id {process_instance_id} cannot be found"
-                    " that is associated with you."
- ), + message=f"Process instance with id {process_instance_id} cannot be found that is associated with you.", status_code=400, ) ) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_models_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_models_controller.py index 845349ff..192e7f11 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_models_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_models_controller.py @@ -63,11 +63,7 @@ def process_model_create( "fault_or_suspend_on_exception", "exception_notification_addresses", ] - body_filtered = { - include_item: body[include_item] - for include_item in body_include_list - if include_item in body - } + body_filtered = {include_item: body[include_item] for include_item in body_include_list if include_item in body} _get_process_group_from_modified_identifier(modified_process_group_id) @@ -82,25 +78,19 @@ def process_model_create( if ProcessModelService.is_process_model_identifier(process_model_info.id): raise ApiError( error_code="process_model_with_id_already_exists", - message=( - f"Process Model with given id already exists: {process_model_info.id}" - ), + message=f"Process Model with given id already exists: {process_model_info.id}", status_code=400, ) if ProcessModelService.is_process_group_identifier(process_model_info.id): raise ApiError( error_code="process_group_with_id_already_exists", - message=( - f"Process Group with given id already exists: {process_model_info.id}" - ), + message=f"Process Group with given id already exists: {process_model_info.id}", status_code=400, ) ProcessModelService.add_process_model(process_model_info) - _commit_and_push_to_git( - f"User: {g.user.username} created process model {process_model_info.id}" - ) + _commit_and_push_to_git(f"User: {g.user.username} created process model {process_model_info.id}") return Response( json.dumps(ProcessModelInfoSchema().dump(process_model_info)), status=201, @@ -122,9 +112,7 @@ def process_model_delete( status_code=400, ) from exception - _commit_and_push_to_git( - f"User: {g.user.username} deleted process model {process_model_identifier}" - ) + _commit_and_push_to_git(f"User: {g.user.username} deleted process model {process_model_identifier}") return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") @@ -143,11 +131,7 @@ def process_model_update( "fault_or_suspend_on_exception", "exception_notification_addresses", ] - body_filtered = { - include_item: body[include_item] - for include_item in body_include_list - if include_item in body - } + body_filtered = {include_item: body[include_item] for include_item in body_include_list if include_item in body} process_model = _get_process_model(process_model_identifier) @@ -156,10 +140,7 @@ def process_model_update( # All we really need this for is to get the process id from a bpmn file so maybe that could # all be moved to FileSystemService. update_primary_bpmn_file = False - if ( - "primary_file_name" in body_filtered - and "primary_process_id" not in body_filtered - ): + if "primary_file_name" in body_filtered and "primary_process_id" not in body_filtered: if process_model.primary_file_name != body_filtered["primary_file_name"]: update_primary_bpmn_file = True @@ -167,22 +148,14 @@ def process_model_update( # update the file to ensure we get the correct process id if the primary file changed. 
     if update_primary_bpmn_file and process_model.primary_file_name:
-        primary_file_contents = SpecFileService.get_data(
-            process_model, process_model.primary_file_name
-        )
-        SpecFileService.update_file(
-            process_model, process_model.primary_file_name, primary_file_contents
-        )
+        primary_file_contents = SpecFileService.get_data(process_model, process_model.primary_file_name)
+        SpecFileService.update_file(process_model, process_model.primary_file_name, primary_file_contents)

-    _commit_and_push_to_git(
-        f"User: {g.user.username} updated process model {process_model_identifier}"
-    )
+    _commit_and_push_to_git(f"User: {g.user.username} updated process model {process_model_identifier}")
     return ProcessModelInfoSchema().dump(process_model)


-def process_model_show(
-    modified_process_model_identifier: str, include_file_references: bool = False
-) -> Any:
+def process_model_show(modified_process_model_identifier: str, include_file_references: bool = False) -> Any:
     """Process_model_show."""
     process_model_identifier = modified_process_model_identifier.replace(":", "/")
     process_model = _get_process_model(process_model_identifier)
@@ -194,13 +167,9 @@ def process_model_show(
     if include_file_references:
         for file in process_model.files:
-            file.references = SpecFileService.get_references_for_file(
-                file, process_model
-            )
+            file.references = SpecFileService.get_references_for_file(file, process_model)

-    process_model.parent_groups = ProcessModelService.get_parent_group_array(
-        process_model.id
-    )
+    process_model.parent_groups = ProcessModelService.get_parent_group_array(process_model.id)
     try:
         current_git_revision = GitService.get_current_revision()
     except GitCommandError:
@@ -210,19 +179,12 @@ def process_model_show(
     return make_response(jsonify(process_model), 200)


-def process_model_move(
-    modified_process_model_identifier: str, new_location: str
-) -> flask.wrappers.Response:
+def process_model_move(modified_process_model_identifier: str, new_location: str) -> flask.wrappers.Response:
     """Process_model_move."""
-    original_process_model_id = _un_modify_modified_process_model_id(
-        modified_process_model_identifier
-    )
-    new_process_model = ProcessModelService().process_model_move(
-        original_process_model_id, new_location
-    )
+    original_process_model_id = _un_modify_modified_process_model_id(modified_process_model_identifier)
+    new_process_model = ProcessModelService().process_model_move(original_process_model_id, new_location)
     _commit_and_push_to_git(
-        f"User: {g.user.username} moved process model {original_process_model_id} to"
-        f" {new_process_model.id}"
+        f"User: {g.user.username} moved process model {original_process_model_id} to {new_process_model.id}"
     )
     return make_response(jsonify(new_process_model), 200)

@@ -232,17 +194,13 @@ def process_model_publish(
 ) -> flask.wrappers.Response:
     """Process_model_publish."""
     if branch_to_update is None:
-        branch_to_update = current_app.config[
-            "SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH"
-        ]
+        branch_to_update = current_app.config["SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH"]
     if branch_to_update is None:
         raise MissingGitConfigsError(
             "Missing config for SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH. "
" "This is required for publishing process models" ) - process_model_identifier = _un_modify_modified_process_model_id( - modified_process_model_identifier - ) + process_model_identifier = _un_modify_modified_process_model_id(modified_process_model_identifier) pr_url = GitService().publish(process_model_identifier, branch_to_update) data = {"ok": True, "pr_url": pr_url} return Response(json.dumps(data), status=200, mimetype="application/json") @@ -262,21 +220,15 @@ def process_model_list( recursive=recursive, filter_runnable_by_user=filter_runnable_by_user, ) - process_models_to_return = ProcessModelService().get_batch( - process_models, page=page, per_page=per_page - ) + process_models_to_return = ProcessModelService().get_batch(process_models, page=page, per_page=per_page) if include_parent_groups: process_group_cache = IdToProcessGroupMapping({}) for process_model in process_models_to_return: - parent_group_lites_with_cache = ( - ProcessModelService.get_parent_group_array_and_cache_it( - process_model.id, process_group_cache - ) + parent_group_lites_with_cache = ProcessModelService.get_parent_group_array_and_cache_it( + process_model.id, process_group_cache ) - process_model.parent_groups = parent_group_lites_with_cache[ - "process_groups" - ] + process_model.parent_groups = parent_group_lites_with_cache["process_groups"] pages = len(process_models) // per_page remainder = len(process_models) % per_page @@ -293,19 +245,13 @@ def process_model_list( return make_response(jsonify(response_json), 200) -def process_model_file_update( - modified_process_model_identifier: str, file_name: str -) -> flask.wrappers.Response: +def process_model_file_update(modified_process_model_identifier: str, file_name: str) -> flask.wrappers.Response: """Process_model_file_update.""" message = f"User: {g.user.username} clicked save for" - return _create_or_update_process_model_file( - modified_process_model_identifier, message, 200 - ) + return _create_or_update_process_model_file(modified_process_model_identifier, message, 200) -def process_model_file_delete( - modified_process_model_identifier: str, file_name: str -) -> flask.wrappers.Response: +def process_model_file_delete(modified_process_model_identifier: str, file_name: str) -> flask.wrappers.Response: """Process_model_file_delete.""" process_model_identifier = modified_process_model_identifier.replace(":", "/") process_model = _get_process_model(process_model_identifier) @@ -333,8 +279,7 @@ def process_model_file_delete( ) from exception _commit_and_push_to_git( - f"User: {g.user.username} deleted process model file" - f" {process_model_identifier}/{file_name}" + f"User: {g.user.username} deleted process model file {process_model_identifier}/{file_name}" ) return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") @@ -344,14 +289,10 @@ def process_model_file_create( ) -> flask.wrappers.Response: """Process_model_file_create.""" message = f"User: {g.user.username} added process model file" - return _create_or_update_process_model_file( - modified_process_model_identifier, message, 201 - ) + return _create_or_update_process_model_file(modified_process_model_identifier, message, 201) -def process_model_file_show( - modified_process_model_identifier: str, file_name: str -) -> Any: +def process_model_file_show(modified_process_model_identifier: str, file_name: str) -> Any: """Process_model_file_show.""" process_model_identifier = modified_process_model_identifier.replace(":", "/") process_model = 
@@ -360,8 +301,7 @@ def process_model_file_show(
         raise ApiError(
             error_code="unknown file",
             message=(
-                f"No information exists for file {file_name}"
-                f" it does not exist in workflow {process_model_identifier}."
+                f"No information exists for file {file_name} it does not exist in workflow {process_model_identifier}."
             ),
             status_code=404,
         )
@@ -382,17 +322,13 @@ def process_model_create_with_natural_language(
 ) -> flask.wrappers.Response:
     """Process_model_create_with_natural_language."""
     pattern = re.compile(
-        r"Create a (?P<pm_name>.*?) process model with a (?P<form_name>.*?) form that"
-        r" collects (?P<columns>.*)"
+        r"Create a (?P<pm_name>.*?) process model with a (?P<form_name>.*?) form that" r" collects (?P<columns>.*)"
     )
     match = pattern.match(body["natural_language_text"])
     if match is None:
         raise ApiError(
             error_code="natural_language_text_not_yet_supported",
-            message=(
-                "Natural language text is not yet supported. Please use the form:"
-                f" {pattern.pattern}"
-            ),
+            message=f"Natural language text is not yet supported. Please use the form: {pattern.pattern}",
             status_code=400,
         )
     process_model_display_name = match.group("pm_name")
@@ -406,12 +342,8 @@ def process_model_create_with_natural_language(
     column_names = match.group("columns")
     columns = re.sub(r"(, (and )?)", ",", column_names).split(",")

-    process_group = _get_process_group_from_modified_identifier(
-        modified_process_group_id
-    )
-    qualified_process_model_identifier = (
-        f"{process_group.id}/{process_model_identifier}"
-    )
+    process_group = _get_process_group_from_modified_identifier(modified_process_group_id)
+    qualified_process_model_identifier = f"{process_group.id}/{process_model_identifier}"

     metadata_extraction_paths = []
     for column in columns:
@@ -432,9 +364,7 @@ def process_model_create_with_natural_language(
             status_code=400,
         )

-    bpmn_template_file = os.path.join(
-        current_app.root_path, "templates", "basic_with_user_task_template.bpmn"
-    )
+    bpmn_template_file = os.path.join(current_app.root_path, "templates", "basic_with_user_task_template.bpmn")
     if not os.path.exists(bpmn_template_file):
         raise ApiError(
             error_code="bpmn_template_file_does_not_exist",
@@ -451,9 +381,7 @@ def process_model_create_with_natural_language(
     bpmn_template_contents = bpmn_template_contents.replace(
         "natural_language_process_id_template", bpmn_process_identifier
     )
-    bpmn_template_contents = bpmn_template_contents.replace(
-        "form-identifier-id-template", form_identifier
-    )
+    bpmn_template_contents = bpmn_template_contents.replace("form-identifier-id-template", form_identifier)

     form_uischema_json: dict = {"ui:order": columns}

@@ -487,21 +415,14 @@ def process_model_create_with_natural_language(
     )

     _commit_and_push_to_git(
-        f"User: {g.user.username} created process model via natural language:"
-        f" {process_model_info.id}"
+        f"User: {g.user.username} created process model via natural language: {process_model_info.id}"
     )

-    default_report_metadata = ProcessInstanceReportService.system_metadata_map(
-        "default"
-    )
+    default_report_metadata = ProcessInstanceReportService.system_metadata_map("default")
     if default_report_metadata is None:
-        raise ProcessInstanceReportNotFoundError(
-            "Could not find a report with identifier 'default'"
-        )
+        raise ProcessInstanceReportNotFoundError("Could not find a report with identifier 'default'")
     for column in columns:
-        default_report_metadata["columns"].append(
-            {"Header": column, "accessor": column, "filterable": True}
-        )
"filterable": True}) ProcessInstanceReportModel.create_report( identifier=process_model_identifier, user=g.user, @@ -534,16 +455,11 @@ def _get_process_group_from_modified_identifier( if modified_process_group_id is None: raise ApiError( error_code="process_group_id_not_specified", - message=( - "Process Model could not be created when process_group_id path param is" - " unspecified" - ), + message="Process Model could not be created when process_group_id path param is unspecified", status_code=400, ) - unmodified_process_group_id = _un_modify_modified_process_model_id( - modified_process_group_id - ) + unmodified_process_group_id = _un_modify_modified_process_model_id(modified_process_group_id) process_group = ProcessModelService.get_process_group(unmodified_process_group_id) if process_group is None: raise ApiError( @@ -584,26 +500,19 @@ def _create_or_update_process_model_file( file = None try: - file = SpecFileService.update_file( - process_model, request_file.filename, request_file_contents - ) + file = SpecFileService.update_file(process_model, request_file.filename, request_file_contents) except ProcessModelFileInvalidError as exception: raise ( ApiError( error_code="process_model_file_invalid", - message=( - f"Invalid Process model file: {request_file.filename}." - f" Received error: {str(exception)}" - ), + message=f"Invalid Process model file: {request_file.filename}. Received error: {str(exception)}", status_code=400, ) ) from exception file_contents = SpecFileService.get_data(process_model, file.name) file.file_contents = file_contents file.process_model_id = process_model.id - _commit_and_push_to_git( - f"{message_for_git_commit} {process_model_identifier}/{file.name}" - ) + _commit_and_push_to_git(f"{message_for_git_commit} {process_model_identifier}/{file.name}") return Response( json.dumps(FileSchema().dump(file)), diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/script_unit_tests_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/script_unit_tests_controller.py index 1af8febb..303dd94a 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/script_unit_tests_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/script_unit_tests_controller.py @@ -26,13 +26,9 @@ def script_unit_test_create( modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]] ) -> flask.wrappers.Response: """Script_unit_test_create.""" - bpmn_task_identifier = _get_required_parameter_or_raise( - "bpmn_task_identifier", body - ) + bpmn_task_identifier = _get_required_parameter_or_raise("bpmn_task_identifier", body) input_json = _get_required_parameter_or_raise("input_json", body) - expected_output_json = _get_required_parameter_or_raise( - "expected_output_json", body - ) + expected_output_json = _get_required_parameter_or_raise("expected_output_json", body) process_model_identifier = modified_process_model_identifier.replace(":", "/") process_model = _get_process_model(process_model_identifier) @@ -40,10 +36,7 @@ def script_unit_test_create( if file is None: raise ApiError( error_code="cannot_find_file", - message=( - "Could not find the primary bpmn file for process_model:" - f" {process_model.id}" - ), + message=f"Could not find the primary bpmn file for process_model: {process_model.id}", status_code=404, ) @@ -52,9 +45,7 @@ def script_unit_test_create( bpmn_etree_element = SpecFileService.get_etree_from_xml_bytes(file_contents) nsmap = bpmn_etree_element.nsmap - spiff_element_maker = ElementMaker( - 
namespace="http://spiffworkflow.org/bpmn/schema/1.0/core", nsmap=nsmap - ) + spiff_element_maker = ElementMaker(namespace="http://spiffworkflow.org/bpmn/schema/1.0/core", nsmap=nsmap) script_task_elements = bpmn_etree_element.xpath( f"//bpmn:scriptTask[@id='{bpmn_task_identifier}']", @@ -74,9 +65,7 @@ def script_unit_test_create( namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"}, ) if len(extension_elements_array) == 0: - bpmn_element_maker = ElementMaker( - namespace="http://www.omg.org/spec/BPMN/20100524/MODEL", nsmap=nsmap - ) + bpmn_element_maker = ElementMaker(namespace="http://www.omg.org/spec/BPMN/20100524/MODEL", nsmap=nsmap) extension_elements = bpmn_element_maker("extensionElements") script_task_element.append(extension_elements) else: @@ -93,23 +82,16 @@ def script_unit_test_create( else: unit_test_elements = unit_test_elements_array[0] - fuzz = "".join( - random.choice(string.ascii_uppercase + string.digits) # noqa: S311 - for _ in range(7) - ) + fuzz = "".join(random.choice(string.ascii_uppercase + string.digits) for _ in range(7)) # noqa: S311 unit_test_id = f"unit_test_{fuzz}" input_json_element = spiff_element_maker("inputJson", json.dumps(input_json)) - expected_output_json_element = spiff_element_maker( - "expectedOutputJson", json.dumps(expected_output_json) - ) + expected_output_json_element = spiff_element_maker("expectedOutputJson", json.dumps(expected_output_json)) unit_test_element = spiff_element_maker("unitTest", id=unit_test_id) unit_test_element.append(input_json_element) unit_test_element.append(expected_output_json_element) unit_test_elements.append(unit_test_element) - SpecFileService.update_file( - process_model, file.name, etree.tostring(bpmn_etree_element) - ) + SpecFileService.update_file(process_model, file.name, etree.tostring(bpmn_etree_element)) return Response(json.dumps({"ok": True}), status=202, mimetype="application/json") @@ -124,9 +106,7 @@ def script_unit_test_run( python_script = _get_required_parameter_or_raise("python_script", body) input_json = _get_required_parameter_or_raise("input_json", body) - expected_output_json = _get_required_parameter_or_raise( - "expected_output_json", body - ) + expected_output_json = _get_required_parameter_or_raise("expected_output_json", body) result = ScriptUnitTestRunner.run_with_script_and_pre_post_contexts( python_script, input_json, expected_output_json diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/service_tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/service_tasks_controller.py index 13df219a..9920a207 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/service_tasks_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/service_tasks_controller.py @@ -17,9 +17,7 @@ from spiffworkflow_backend.services.service_task_service import ServiceTaskServi def service_task_list() -> flask.wrappers.Response: """Service_task_list.""" available_connectors = ServiceTaskService.available_connectors() - return Response( - json.dumps(available_connectors), status=200, mimetype="application/json" - ) + return Response(json.dumps(available_connectors), status=200, mimetype="application/json") def authentication_list() -> flask.wrappers.Response: @@ -27,9 +25,7 @@ def authentication_list() -> flask.wrappers.Response: available_authentications = ServiceTaskService.authentication_list() response_json = { "results": available_authentications, - "connector_proxy_base_url": current_app.config[ - 
"SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL" - ], + "connector_proxy_base_url": current_app.config["SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL"], "redirect_url": f"{current_app.config['SPIFFWORKFLOW_BACKEND_URL']}/v1.0/authentication_callback", } @@ -43,9 +39,5 @@ def authentication_callback( """Authentication_callback.""" verify_token(request.args.get("token"), force_run=True) response = request.args["response"] - SecretService.update_secret( - f"{service}/{auth_method}", response, g.user.id, create_if_not_exists=True - ) - return redirect( - f"{current_app.config['SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND']}/admin/configuration" - ) + SecretService.update_secret(f"{service}/{auth_method}", response, g.user.id, create_if_not_exists=True) + return redirect(f"{current_app.config['SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND']}/admin/configuration") diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py index 0afd721f..db13c407 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py @@ -104,9 +104,7 @@ def task_list_my_tasks( ProcessInstanceModel.status != ProcessInstanceStatus.error.value, ) - potential_owner_usernames_from_group_concat_or_similar = ( - _get_potential_owner_usernames(assigned_user) - ) + potential_owner_usernames_from_group_concat_or_similar = _get_potential_owner_usernames(assigned_user) # FIXME: this breaks postgres. Look at commit c147cdb47b1481f094b8c3d82dc502fe961f4977 for # the postgres fix but it breaks the method for mysql. @@ -147,9 +145,7 @@ def task_list_my_tasks( return make_response(jsonify(response_json), 200) -def task_list_for_my_open_processes( - page: int = 1, per_page: int = 100 -) -> flask.wrappers.Response: +def task_list_for_my_open_processes(page: int = 1, per_page: int = 100) -> flask.wrappers.Response: """Task_list_for_my_open_processes.""" return _get_tasks(page=page, per_page=per_page) @@ -194,10 +190,7 @@ def task_data_show( if step_detail is None: raise ApiError( error_code="spiff_step_for_proces_instance_not_found", - message=( - "The given spiff step for the given process instance could not be" - " found." 
- ), + message="The given spiff step for the given process instance could not be found.", status_code=400, ) @@ -228,9 +221,7 @@ def _munge_form_ui_schema_based_on_hidden_fields_in_task_data(task: Task) -> Non for ii, hidden_field_part in enumerate(hidden_field_parts): if hidden_field_part not in relevant_depth_of_ui_schema: relevant_depth_of_ui_schema[hidden_field_part] = {} - relevant_depth_of_ui_schema = relevant_depth_of_ui_schema[ - hidden_field_part - ] + relevant_depth_of_ui_schema = relevant_depth_of_ui_schema[hidden_field_part] if len(hidden_field_parts) == ii + 1: relevant_depth_of_ui_schema["ui:widget"] = "hidden" @@ -255,9 +246,7 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response form_schema_file_name = "" form_ui_schema_file_name = "" processor = ProcessInstanceProcessor(process_instance) - spiff_task = _get_spiff_task_from_process_instance( - task_id, process_instance, processor=processor - ) + spiff_task = _get_spiff_task_from_process_instance(task_id, process_instance, processor=processor) extensions = spiff_task.task_spec.extensions if "properties" in extensions: @@ -276,23 +265,13 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response refs = SpecFileService.get_references_for_process(process_model_with_form) all_processes = [i.identifier for i in refs] if task.process_identifier not in all_processes: - top_process_name = processor.find_process_model_process_name_by_task_name( - task.process_identifier - ) - bpmn_file_full_path = ( - ProcessInstanceProcessor.bpmn_file_full_path_from_bpmn_process_identifier( - top_process_name - ) - ) - relative_path = os.path.relpath( - bpmn_file_full_path, start=FileSystemService.root_path() + top_process_name = processor.find_process_model_process_name_by_task_name(task.process_identifier) + bpmn_file_full_path = ProcessInstanceProcessor.bpmn_file_full_path_from_bpmn_process_identifier( + top_process_name ) + relative_path = os.path.relpath(bpmn_file_full_path, start=FileSystemService.root_path()) process_model_relative_path = os.path.dirname(relative_path) - process_model_with_form = ( - ProcessModelService.get_process_model_from_relative_path( - process_model_relative_path - ) - ) + process_model_with_form = ProcessModelService.get_process_model_from_relative_path(process_model_relative_path) if task.type == "User Task": if not form_schema_file_name: @@ -300,8 +279,7 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response ApiError( error_code="missing_form_file", message=( - "Cannot find a form file for process_instance_id:" - f" {process_instance_id}, task_id: {task_id}" + f"Cannot find a form file for process_instance_id: {process_instance_id}, task_id: {task_id}" ), status_code=400, ) @@ -338,9 +316,7 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response ) except WorkflowTaskException as wfe: wfe.add_note("Failed to render instructions for end user.") - raise ApiError.from_workflow_exception( - "instructions_error", str(wfe), exp=wfe - ) from wfe + raise ApiError.from_workflow_exception("instructions_error", str(wfe), exp=wfe) from wfe return make_response(jsonify(task), 200) @@ -387,12 +363,8 @@ def task_submit_shared( ) processor = ProcessInstanceProcessor(process_instance) - spiff_task = _get_spiff_task_from_process_instance( - task_id, process_instance, processor=processor - ) - AuthorizationService.assert_user_can_complete_spiff_task( - process_instance.id, spiff_task, principal.user - ) + spiff_task = 
+    AuthorizationService.assert_user_can_complete_spiff_task(process_instance.id, spiff_task, principal.user)

     if spiff_task.state != TaskState.READY:
         raise (
@@ -434,18 +406,14 @@ def task_submit_shared(
         # next_task = processor.next_task()

         next_human_task_assigned_to_me = (
-            HumanTaskModel.query.filter_by(
-                process_instance_id=process_instance_id, completed=False
-            )
+            HumanTaskModel.query.filter_by(process_instance_id=process_instance_id, completed=False)
             .order_by(asc(HumanTaskModel.id))  # type: ignore
             .join(HumanTaskUserModel)
             .filter_by(user_id=principal.user_id)
             .first()
         )
         if next_human_task_assigned_to_me:
-            return make_response(
-                jsonify(HumanTaskModel.to_task(next_human_task_assigned_to_me)), 200
-            )
+            return make_response(jsonify(HumanTaskModel.to_task(next_human_task_assigned_to_me)), 200)

     return Response(json.dumps({"ok": True}), status=202, mimetype="application/json")

@@ -457,9 +425,7 @@ def task_submit(
     terminate_loop: bool = False,
 ) -> flask.wrappers.Response:
     """Task_submit_user_data."""
-    with sentry_sdk.start_span(
-        op="controller_action", description="tasks_controller.task_submit"
-    ):
+    with sentry_sdk.start_span(op="controller_action", description="tasks_controller.task_submit"):
         return task_submit_shared(process_instance_id, task_id, body, terminate_loop)

@@ -492,9 +458,7 @@ def _get_tasks(
     assigned_user = aliased(UserModel)
     if processes_started_by_user:
         human_tasks_query = (
-            human_tasks_query.filter(
-                ProcessInstanceModel.process_initiator_id == user_id
-            )
+            human_tasks_query.filter(ProcessInstanceModel.process_initiator_id == user_id)
             .outerjoin(
                 HumanTaskUserModel,
                 HumanTaskModel.id == HumanTaskUserModel.human_task_id,
             )
             .outerjoin(assigned_user, assigned_user.id == HumanTaskUserModel.user_id)
         )
     else:
-        human_tasks_query = human_tasks_query.filter(
-            ProcessInstanceModel.process_initiator_id != user_id
-        ).join(
+        human_tasks_query = human_tasks_query.filter(ProcessInstanceModel.process_initiator_id != user_id).join(
             HumanTaskUserModel,
             and_(
                 HumanTaskUserModel.user_id == user_id,
@@ -514,9 +476,7 @@ def _get_tasks(
         if has_lane_assignment_id:
             if user_group_identifier:
-                human_tasks_query = human_tasks_query.filter(
-                    GroupModel.identifier == user_group_identifier
-                )
+                human_tasks_query = human_tasks_query.filter(GroupModel.identifier == user_group_identifier)
             else:
                 human_tasks_query = human_tasks_query.filter(
                     HumanTaskModel.lane_assignment_id.is_not(None)  # type: ignore
                 )
         else:
             human_tasks_query = human_tasks_query.filter(HumanTaskModel.lane_assignment_id.is_(None))  # type: ignore

-    potential_owner_usernames_from_group_concat_or_similar = (
-        _get_potential_owner_usernames(assigned_user)
-    )
+    potential_owner_usernames_from_group_concat_or_similar = _get_potential_owner_usernames(assigned_user)

     human_tasks = (
         human_tasks_query.add_columns(
@@ -558,9 +516,7 @@ def _get_tasks(
     return make_response(jsonify(response_json), 200)


-def _prepare_form_data(
-    form_file: str, spiff_task: SpiffTask, process_model: ProcessModelInfo
-) -> dict:
+def _prepare_form_data(form_file: str, spiff_task: SpiffTask, process_model: ProcessModelInfo) -> dict:
     """Prepare_form_data."""
     if spiff_task.data is None:
         return {}
@@ -576,42 +532,29 @@ def _prepare_form_data(
         raise (
             ApiError(
                 error_code="error_loading_form",
-                message=(
-                    f"Could not load form schema from: {form_file}."
- f" Error was: {str(exception)}" - ), + message=f"Could not load form schema from: {form_file}. Error was: {str(exception)}", status_code=400, ) ) from exception except WorkflowTaskException as wfe: wfe.add_note(f"Error in Json Form File '{form_file}'") - api_error = ApiError.from_workflow_exception( - "instructions_error", str(wfe), exp=wfe - ) + api_error = ApiError.from_workflow_exception("instructions_error", str(wfe), exp=wfe) api_error.file_name = form_file raise api_error def _render_jinja_template(unprocessed_template: str, spiff_task: SpiffTask) -> str: """Render_jinja_template.""" - jinja_environment = jinja2.Environment( - autoescape=True, lstrip_blocks=True, trim_blocks=True - ) + jinja_environment = jinja2.Environment(autoescape=True, lstrip_blocks=True, trim_blocks=True) try: template = jinja_environment.from_string(unprocessed_template) return template.render(**spiff_task.data) except jinja2.exceptions.TemplateError as template_error: - wfe = WorkflowTaskException( - str(template_error), task=spiff_task, exception=template_error - ) + wfe = WorkflowTaskException(str(template_error), task=spiff_task, exception=template_error) if isinstance(template_error, TemplateSyntaxError): wfe.line_number = template_error.lineno - wfe.error_line = template_error.source.split("\n")[ - template_error.lineno - 1 - ] - wfe.add_note( - "Jinja2 template errors can happen when trying to display task data" - ) + wfe.error_line = template_error.source.split("\n")[template_error.lineno - 1] + wfe.add_note("Jinja2 template errors can happen when trying to display task data") raise wfe from template_error except Exception as error: _type, _value, tb = exc_info() @@ -621,9 +564,7 @@ def _render_jinja_template(unprocessed_template: str, spiff_task: SpiffTask) -> wfe.line_number = tb.tb_lineno wfe.error_line = unprocessed_template.split("\n")[tb.tb_lineno - 1] tb = tb.tb_next - wfe.add_note( - "Jinja2 template errors can happen when trying to display task data" - ) + wfe.add_note("Jinja2 template errors can happen when trying to display task data") raise wfe from error @@ -650,9 +591,7 @@ def _get_spiff_task_from_process_instance( # originally from: https://bitcoden.com/answers/python-nested-dictionary-update-value-where-any-nested-key-matches -def _update_form_schema_with_task_data_as_needed( - in_dict: dict, task: Task, spiff_task: SpiffTask -) -> None: +def _update_form_schema_with_task_data_as_needed(in_dict: dict, task: Task, spiff_task: SpiffTask) -> None: """Update_nested.""" if task.data is None: return None @@ -664,12 +603,8 @@ def _update_form_schema_with_task_data_as_needed( if len(value) == 1: first_element_in_value_list = value[0] if isinstance(first_element_in_value_list, str): - if first_element_in_value_list.startswith( - "options_from_task_data_var:" - ): - task_data_var = first_element_in_value_list.replace( - "options_from_task_data_var:", "" - ) + if first_element_in_value_list.startswith("options_from_task_data_var:"): + task_data_var = first_element_in_value_list.replace("options_from_task_data_var:", "") if task_data_var not in task.data: wte = WorkflowTaskException( @@ -691,10 +626,7 @@ def _update_form_schema_with_task_data_as_needed( select_options_from_task_data = task.data.get(task_data_var) if isinstance(select_options_from_task_data, list): - if all( - "value" in d and "label" in d - for d in select_options_from_task_data - ): + if all("value" in d and "label" in d for d in select_options_from_task_data): def map_function( task_data_select_option: TaskDataSelectOption, @@ 
@@ -744,9 +676,7 @@ def _find_human_task_or_raise(
             process_instance_id=process_instance_id, task_id=task_id, completed=False
         )
     else:
-        human_task_query = HumanTaskModel.query.filter_by(
-            process_instance_id=process_instance_id, task_id=task_id
-        )
+        human_task_query = HumanTaskModel.query.filter_by(process_instance_id=process_instance_id, task_id=task_id)

     human_task: HumanTaskModel = human_task_query.first()
     if human_task is None:
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py
index a8969141..14ce1027 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py
@@ -80,8 +80,7 @@ def verify_token(
                 user_model = get_user_from_decoded_internal_token(decoded_token)
             except Exception as e:
                 current_app.logger.error(
-                    "Exception in verify_token getting user from decoded"
-                    f" internal token. {e}"
+                    f"Exception in verify_token getting user from decoded internal token. {e}"
                 )
         elif "iss" in decoded_token.keys():
             user_info = None
@@ -90,22 +89,12 @@ def verify_token(
                     user_info = decoded_token
             except TokenExpiredError as token_expired_error:
                 # Try to refresh the token
-                user = UserService.get_user_by_service_and_service_id(
-                    decoded_token["iss"], decoded_token["sub"]
-                )
+                user = UserService.get_user_by_service_and_service_id(decoded_token["iss"], decoded_token["sub"])
                 if user:
                     refresh_token = AuthenticationService.get_refresh_token(user.id)
                     if refresh_token:
-                        auth_token: dict = (
-                            AuthenticationService.get_auth_token_from_refresh_token(
-                                refresh_token
-                            )
-                        )
-                        if (
-                            auth_token
-                            and "error" not in auth_token
-                            and "id_token" in auth_token
-                        ):
+                        auth_token: dict = AuthenticationService.get_auth_token_from_refresh_token(refresh_token)
+                        if auth_token and "error" not in auth_token and "id_token" in auth_token:
                             tld = current_app.config["THREAD_LOCAL_DATA"]
                             tld.new_access_token = auth_token["id_token"]
                             tld.new_id_token = auth_token["id_token"]
@@ -130,9 +119,7 @@ def verify_token(
                     status_code=401,
                 ) from e
             if (
-                user_info is not None
-                and "error" not in user_info
-                and "iss" in user_info
+                user_info is not None and "error" not in user_info and "iss" in user_info
             ):  # not sure what to test yet
                 user_model = (
                     UserModel.query.filter(UserModel.service == user_info["iss"])
@@ -154,9 +141,7 @@ def verify_token(
                 )

         else:
-            current_app.logger.debug(
-                "token_type not in decode_token in verify_token"
-            )
+            current_app.logger.debug("token_type not in decode_token in verify_token")
             raise ApiError(
                 error_code="invalid_token",
                 message="Invalid token. Please log in.",
                 status_code=401,
             )

@@ -175,9 +160,7 @@ def verify_token(
     else:
         raise ApiError(error_code="no_user_id", message="Cannot get a user id")

-    raise ApiError(
-        error_code="invalid_token", message="Cannot validate token.", status_code=401
-    )
+    raise ApiError(error_code="invalid_token", message="Cannot validate token.", status_code=401)


 def set_new_access_token_in_cookie(
@@ -193,30 +176,20 @@ def set_new_access_token_in_cookie(
         "",
         current_app.config["SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND"],
     )
-    if domain_for_frontend_cookie and domain_for_frontend_cookie.startswith(
-        "localhost"
-    ):
+    if domain_for_frontend_cookie and domain_for_frontend_cookie.startswith("localhost"):
         domain_for_frontend_cookie = None

     # fixme - we should not be passing the access token back to the client
     if hasattr(tld, "new_access_token") and tld.new_access_token:
-        response.set_cookie(
-            "access_token", tld.new_access_token, domain=domain_for_frontend_cookie
-        )
+        response.set_cookie("access_token", tld.new_access_token, domain=domain_for_frontend_cookie)

     # id_token is required for logging out since this gets passed back to the openid server
     if hasattr(tld, "new_id_token") and tld.new_id_token:
-        response.set_cookie(
-            "id_token", tld.new_id_token, domain=domain_for_frontend_cookie
-        )
+        response.set_cookie("id_token", tld.new_id_token, domain=domain_for_frontend_cookie)

     if hasattr(tld, "user_has_logged_out") and tld.user_has_logged_out:
-        response.set_cookie(
-            "id_token", "", max_age=0, domain=domain_for_frontend_cookie
-        )
-        response.set_cookie(
-            "access_token", "", max_age=0, domain=domain_for_frontend_cookie
-        )
+        response.set_cookie("id_token", "", max_age=0, domain=domain_for_frontend_cookie)
+        response.set_cookie("access_token", "", max_age=0, domain=domain_for_frontend_cookie)

     _clear_auth_tokens_from_thread_local_data()

@@ -236,9 +209,7 @@ def encode_auth_token(sub: str, token_type: Optional[str] = None) -> str:
         secret_key = current_app.config.get("SECRET_KEY")
     else:
         current_app.logger.error("Missing SECRET_KEY in encode_auth_token")
-        raise ApiError(
-            error_code="encode_error", message="Missing SECRET_KEY in encode_auth_token"
-        )
+        raise ApiError(error_code="encode_error", message="Missing SECRET_KEY in encode_auth_token")
     return jwt.encode(
         payload,
         str(secret_key),
@@ -249,9 +220,7 @@ def login(redirect_url: str = "/") -> Response:
     """Login."""
     state = AuthenticationService.generate_state(redirect_url)
-    login_redirect_url = AuthenticationService().get_login_redirect_url(
-        state.decode("UTF-8")
-    )
+    login_redirect_url = AuthenticationService().get_login_redirect_url(state.decode("UTF-8"))
     return redirect(login_redirect_url)

@@ -281,9 +250,7 @@ def login_return(code: str, state: str, session_state: str = "") -> Optional[Res
             g.user = user_model.id
             g.token = auth_token_object["id_token"]
             if "refresh_token" in auth_token_object:
-                AuthenticationService.store_refresh_token(
-                    user_model.id, auth_token_object["refresh_token"]
-                )
+                AuthenticationService.store_refresh_token(user_model.id, auth_token_object["refresh_token"])
             redirect_url = state_redirect_url
             tld = current_app.config["THREAD_LOCAL_DATA"]
             tld.new_access_token = auth_token_object["id_token"]
@@ -325,9 +292,7 @@ def login_api() -> Response:
     """Login_api."""
     redirect_url = "/v1.0/login_api_return"
     state = AuthenticationService.generate_state(redirect_url)
-    login_redirect_url = AuthenticationService().get_login_redirect_url(
-        state.decode("UTF-8"), redirect_url
-    )
+    login_redirect_url = AuthenticationService().get_login_redirect_url(state.decode("UTF-8"), redirect_url)
     return redirect(login_redirect_url)

@@ -335,9 +300,7 @@ def login_api_return(code: str, state: str, session_state: str) -> str:
     state_dict = ast.literal_eval(base64.b64decode(state).decode("utf-8"))
     state_dict["redirect_url"]

-    auth_token_object = AuthenticationService().get_auth_token_object(
-        code, "/v1.0/login_api_return"
-    )
+    auth_token_object = AuthenticationService().get_auth_token_object(code, "/v1.0/login_api_return")
     access_token: str = auth_token_object["access_token"]
     if access_token is None:
         raise MissingAccessTokenError("Cannot find the access token for the request")
@@ -365,16 +328,12 @@ def get_decoded_token(token: str) -> Optional[Dict]:
     try:
         decoded_token = jwt.decode(token, options={"verify_signature": False})
     except Exception as e:
-        raise ApiError(
-            error_code="invalid_token", message="Cannot decode token."
-        ) from e
+        raise ApiError(error_code="invalid_token", message="Cannot decode token.") from e
     else:
         if "token_type" in decoded_token or "iss" in decoded_token:
             return decoded_token
         else:
-            current_app.logger.error(
-                f"Unknown token type in get_decoded_token: token: {token}"
-            )
+            current_app.logger.error(f"Unknown token type in get_decoded_token: token: {token}")
             raise ApiError(
                 error_code="unknown_token",
                 message="Unknown token type in get_decoded_token",
@@ -397,9 +356,7 @@ def get_user_from_decoded_internal_token(decoded_token: dict) -> Optional[UserMo
     service = parts[0].split(":")[1]
     service_id = parts[1].split(":")[1]
     user: UserModel = (
-        UserModel.query.filter(UserModel.service == service)
-        .filter(UserModel.service_id == service_id)
-        .first()
+        UserModel.query.filter(UserModel.service == service).filter(UserModel.service_id == service_id).first()
     )
     if user:
         return user
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/user_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/user_blueprint.py
index e8807c77..0ce375b5 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/user_blueprint.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/user_blueprint.py
@@ -98,11 +98,7 @@ def create_group(group_name: str) -> flask.wrappers.Response:
     try:
         db.session.add(group)
     except IntegrityError as exception:
-        raise (
-            ApiError(
-                error_code="integrity_error", message=repr(exception), status_code=500
-            )
-        ) from exception
+        raise (ApiError(error_code="integrity_error", message=repr(exception), status_code=500)) from exception
     db.session.commit()
     return Response(json.dumps({"id": group.id}), status=201, mimetype=APPLICATION_JSON)

@@ -133,9 +129,7 @@ def assign_user_to_group() -> flask.wrappers.Response:
     user = get_user_from_request()
     group = get_group_from_request()

-    user_group_assignment = UserGroupAssignmentModel.query.filter_by(
-        user_id=user.id, group_id=group.id
-    ).first()
+    user_group_assignment = UserGroupAssignmentModel.query.filter_by(user_id=user.id, group_id=group.id).first()
     if user_group_assignment is not None:
         raise (
             ApiError(
@@ -162,9 +156,7 @@ def remove_user_from_group() -> flask.wrappers.Response:
     user = get_user_from_request()
     group = get_group_from_request()

-    user_group_assignment = UserGroupAssignmentModel.query.filter_by(
-        user_id=user.id, group_id=group.id
-    ).first()
+    user_group_assignment = UserGroupAssignmentModel.query.filter_by(user_id=user.id, group_id=group.id).first()
     if user_group_assignment is None:
         raise (
             ApiError(
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/users_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/users_controller.py
index 84635af2..c2a83286 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/users_controller.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/users_controller.py
@@ -24,9 +24,6 @@ def user_group_list_for_current_user() -> flask.wrappers.Response:
     groups = g.user.groups
     # TODO: filter out the default group and have a way to know what is the default group
     group_identifiers = [
-        i.identifier
-        for i in groups
-        if i.identifier
-        != current_app.config["SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP"]
+        i.identifier for i in groups if i.identifier != current_app.config["SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP"]
     ]
     return make_response(jsonify(sorted(group_identifiers)), 200)
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/delete_process_instances_with_criteria.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/delete_process_instances_with_criteria.py
index 6d65ea75..a650cb48 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/delete_process_instances_with_criteria.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/delete_process_instances_with_criteria.py
@@ -36,15 +36,10 @@ class DeleteProcessInstancesWithCriteria(Script):
             delete_criteria.append(
                 (ProcessInstanceModel.process_model_identifier == criteria["name"])
                 & ProcessInstanceModel.status.in_(criteria["status"])  # type: ignore
-                & (
-                    ProcessInstanceModel.updated_at_in_seconds
-                    < (delete_time - criteria["last_updated_delta"])
-                )
+                & (ProcessInstanceModel.updated_at_in_seconds < (delete_time - criteria["last_updated_delta"]))
             )

-        results = (
-            ProcessInstanceModel.query.filter(or_(*delete_criteria)).limit(100).all()
-        )
+        results = ProcessInstanceModel.query.filter(or_(*delete_criteria)).limit(100).all()
         rows_affected = len(results)

         if rows_affected > 0:
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/fact_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/fact_service.py
index c739d15a..f429c047 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/fact_service.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/fact_service.py
@@ -20,12 +20,7 @@ class FactService(Script):
         return """Just your basic class that can pull in data from a few api endpoints and
         do a basic task."""

-    def run(
-        self,
-        script_attributes_context: ScriptAttributesContext,
-        *args: Any,
-        **kwargs: Any
-    ) -> Any:
+    def run(self, script_attributes_context: ScriptAttributesContext, *args: Any, **kwargs: Any) -> Any:
         """Run."""
         if "type" not in kwargs:
             raise Exception("Please specify a 'type' of fact as a keyword argument.")
@@ -35,10 +30,7 @@ class FactService(Script):
         if fact == "cat":
             details = "The cat in the hat"  # self.get_cat()
         elif fact == "norris":
-            details = (
-                "Chuck Norris doesn’t read books. He stares them down until he gets the"
-                " information he wants."
-            )
+            details = "Chuck Norris doesn’t read books. He stares them down until he gets the information he wants."
         elif fact == "buzzword":
             details = "Move the Needle."  # self.get_buzzword()
# self.get_buzzword() else: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_all_permissions.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_all_permissions.py index e2ab0763..87f3a88c 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_all_permissions.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_all_permissions.py @@ -34,8 +34,7 @@ class GetAllPermissions(Script): .join(GroupModel, GroupModel.id == PrincipalModel.group_id) .join( PermissionTargetModel, - PermissionTargetModel.id - == PermissionAssignmentModel.permission_target_id, + PermissionTargetModel.id == PermissionAssignmentModel.permission_target_id, ) .add_columns( PermissionAssignmentModel.permission, @@ -46,9 +45,7 @@ class GetAllPermissions(Script): permissions: OrderedDict[tuple[str, str], list[str]] = OrderedDict() for pa in permission_assignments: - permissions.setdefault((pa.group_identifier, pa.uri), []).append( - pa.permission - ) + permissions.setdefault((pa.group_identifier, pa.uri), []).append(pa.permission) def replace_suffix(string: str, old: str, new: str) -> str: """Replace_suffix.""" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_current_user.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_current_user.py index 9921c18b..b3d251d3 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_current_user.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_current_user.py @@ -20,12 +20,7 @@ class GetCurrentUser(Script): """Get_description.""" return """Return the current user.""" - def run( - self, - script_attributes_context: ScriptAttributesContext, - *_args: Any, - **kwargs: Any - ) -> Any: + def run(self, script_attributes_context: ScriptAttributesContext, *_args: Any, **kwargs: Any) -> Any: """Run.""" # dump the user using our json encoder and then load it back up as a dict # to remove unwanted field types diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_data_sizes.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_data_sizes.py index 5f28e2d6..a6247675 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_data_sizes.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_data_sizes.py @@ -27,12 +27,7 @@ class GetDataSizes(Script): return """Returns a dictionary of information about the size of task data and the python environment for the currently running process.""" - def run( - self, - script_attributes_context: ScriptAttributesContext, - *_args: Any, - **kwargs: Any - ) -> Any: + def run(self, script_attributes_context: ScriptAttributesContext, *_args: Any, **kwargs: Any) -> Any: """Run.""" if script_attributes_context.task is None: raise TaskNotGivenToScriptError( @@ -42,8 +37,7 @@ class GetDataSizes(Script): workflow = script_attributes_context.task.workflow task_data_size = ProcessInstanceProcessor.get_task_data_size(workflow) task_data_keys_by_task = { - t.task_spec.name: sorted(t.data.keys()) - for t in ProcessInstanceProcessor.get_tasks_with_data(workflow) + t.task_spec.name: sorted(t.data.keys()) for t in ProcessInstanceProcessor.get_tasks_with_data(workflow) } python_env_size = ProcessInstanceProcessor.get_python_env_size(workflow) python_env_keys = workflow.script_engine.environment.user_defined_state().keys() diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_encoded_file_data.py 
b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_encoded_file_data.py index e01bd7c0..9cc442b3 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_encoded_file_data.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_encoded_file_data.py @@ -42,8 +42,6 @@ class GetEncodedFileData(Script): ).first() base64_value = base64.b64encode(file_data.contents).decode("ascii") - encoded_file_data = ( - f"data:{file_data.mimetype};name={file_data.filename};base64,{base64_value}" - ) + encoded_file_data = f"data:{file_data.mimetype};name={file_data.filename};base64,{base64_value}" return encoded_file_data diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_env.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_env.py index 7a6b0f44..94dd1e44 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_env.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_env.py @@ -19,11 +19,6 @@ class GetEnv(Script): """Get_description.""" return """Returns the current environment - ie testing, staging, production.""" - def run( - self, - script_attributes_context: ScriptAttributesContext, - *_args: Any, - **kwargs: Any - ) -> Any: + def run(self, script_attributes_context: ScriptAttributesContext, *_args: Any, **kwargs: Any) -> Any: """Run.""" return script_attributes_context.environment_identifier diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_frontend_url.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_frontend_url.py index 503b9584..7b981bac 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_frontend_url.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_frontend_url.py @@ -21,11 +21,6 @@ class GetFrontendUrl(Script): """Get_description.""" return """Return the url to the frontend.""" - def run( - self, - script_attributes_context: ScriptAttributesContext, - *args: Any, - **kwargs: Any - ) -> Any: + def run(self, script_attributes_context: ScriptAttributesContext, *args: Any, **kwargs: Any) -> Any: """Run.""" return current_app.config["SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND"] diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_group_members.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_group_members.py index 0f20fbb3..8b179a6d 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_group_members.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_group_members.py @@ -32,8 +32,7 @@ class GetGroupMembers(Script): group = GroupModel.query.filter_by(identifier=group_identifier).first() if group is None: raise GroupNotFoundError( - "Script 'get_group_members' could not find group with identifier" - f" '{group_identifier}'." + f"Script 'get_group_members' could not find group with identifier '{group_identifier}'." ) usernames = [u.username for u in group.users] diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_localtime.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_localtime.py index e16ee23b..f013a796 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_localtime.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_localtime.py @@ -24,12 +24,7 @@ class GetLocaltime(Script): return """Converts a Datetime object into a Datetime object for a specific timezone. 
Defaults to US/Eastern.""" - def run( - self, - script_attributes_context: ScriptAttributesContext, - *args: Any, - **kwargs: Any - ) -> datetime: + def run(self, script_attributes_context: ScriptAttributesContext, *args: Any, **kwargs: Any) -> datetime: """Run.""" if len(args) > 0 or "datetime" in kwargs: if "datetime" in kwargs: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_process_info.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_process_info.py index 99eb4ce2..755e1bfb 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_process_info.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_process_info.py @@ -19,16 +19,9 @@ class GetProcessInfo(Script): """Get_description.""" return """Returns a dictionary of information about the currently running process.""" - def run( - self, - script_attributes_context: ScriptAttributesContext, - *_args: Any, - **kwargs: Any - ) -> Any: + def run(self, script_attributes_context: ScriptAttributesContext, *_args: Any, **kwargs: Any) -> Any: """Run.""" return { "process_instance_id": script_attributes_context.process_instance_id, - "process_model_identifier": ( - script_attributes_context.process_model_identifier - ), + "process_model_identifier": script_attributes_context.process_model_identifier, } diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_process_initiator_user.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_process_initiator_user.py index 266fa57b..2f6833be 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_process_initiator_user.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_process_initiator_user.py @@ -26,9 +26,7 @@ class GetProcessInitiatorUser(Script): ) -> Any: """Run.""" process_instance = ( - ProcessInstanceModel.query.filter_by( - id=script_attributes_context.process_instance_id - ) + ProcessInstanceModel.query.filter_by(id=script_attributes_context.process_instance_id) .join(UserModel, UserModel.id == ProcessInstanceModel.process_initiator_id) .first() ) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_secret.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_secret.py index 1715b6a1..74fb8642 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_secret.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_secret.py @@ -15,11 +15,6 @@ class GetSecret(Script): """Get_description.""" return """Returns the value for a previously configured secret.""" - def run( - self, - script_attributes_context: ScriptAttributesContext, - *args: Any, - **kwargs: Any - ) -> Any: + def run(self, script_attributes_context: ScriptAttributesContext, *args: Any, **kwargs: Any) -> Any: """Run.""" return SecretService.get_secret(args[0]).value diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/markdown_file_download_link.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/markdown_file_download_link.py index 1a60c407..a7a9d3b0 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/markdown_file_download_link.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/markdown_file_download_link.py @@ -37,19 +37,13 @@ class GetMarkdownFileDownloadLink(Script): label = parts[1].split("=")[1] process_model_identifier = script_attributes_context.process_model_identifier if process_model_identifier is None: - raise self.get_proces_model_identifier_is_missing_error( - 
"markdown_file_download_link" - ) - modified_process_model_identifier = ( - ProcessModelInfo.modify_process_identifier_for_path_param( - process_model_identifier - ) + raise self.get_proces_model_identifier_is_missing_error("markdown_file_download_link") + modified_process_model_identifier = ProcessModelInfo.modify_process_identifier_for_path_param( + process_model_identifier ) process_instance_id = script_attributes_context.process_instance_id if process_instance_id is None: - raise self.get_proces_instance_id_is_missing_error( - "save_process_instance_metadata" - ) + raise self.get_proces_instance_id_is_missing_error("save_process_instance_metadata") url = current_app.config["SPIFFWORKFLOW_BACKEND_URL"] url += ( f"/v1.0/process-data-file-download/{modified_process_model_identifier}/" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/save_process_instance_metadata.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/save_process_instance_metadata.py index 19cb34fd..8fe8d3e8 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/save_process_instance_metadata.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/save_process_instance_metadata.py @@ -27,9 +27,7 @@ class SaveProcessInstanceMetadata(Script): """Run.""" metadata_dict = args[0] if script_attributes_context.process_instance_id is None: - raise self.get_proces_instance_id_is_missing_error( - "save_process_instance_metadata" - ) + raise self.get_proces_instance_id_is_missing_error("save_process_instance_metadata") for key, value in metadata_dict.items(): pim = ProcessInstanceMetadataModel.query.filter_by( process_instance_id=script_attributes_context.process_instance_id, diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/script.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/script.py index 12b7fdc7..757989c2 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/script.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/script.py @@ -52,14 +52,11 @@ class Script: """Run.""" raise ApiError( "invalid_script", - "This is an internal error. The script you are trying to execute '%s' " - % self.__class__.__name__ + "This is an internal error. The script you are trying to execute '%s' " % self.__class__.__name__ + "does not properly implement the run function.", ) - def get_proces_instance_id_is_missing_error( - self, script_name: str - ) -> ProcessInstanceIdMissingError: + def get_proces_instance_id_is_missing_error(self, script_name: str) -> ProcessInstanceIdMissingError: """Return the error so we can raise it from the script and mypy will be happy.""" raise ProcessInstanceIdMissingError( "The process instance id was not given to script" @@ -67,9 +64,7 @@ class Script: " within the context of a process instance." 
) - def get_proces_model_identifier_is_missing_error( - self, script_name: str - ) -> ProcessModelIdentifierMissingError: + def get_proces_model_identifier_is_missing_error(self, script_name: str) -> ProcessModelIdentifierMissingError: """Return the error so we can raise it from the script and mypy will be happy.""" return ProcessModelIdentifierMissingError( "The process model identifier was not given to script" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py index b637ac13..a7519a45 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py @@ -56,9 +56,7 @@ class AuthenticationProviderTypes(enum.Enum): class AuthenticationService: """AuthenticationService.""" - ENDPOINT_CACHE: dict = ( - {} - ) # We only need to find the openid endpoints once, then we can cache them. + ENDPOINT_CACHE: dict = {} # We only need to find the openid endpoints once, then we can cache them. @staticmethod def client_id() -> str: @@ -73,9 +71,7 @@ class AuthenticationService: @staticmethod def secret_key() -> str: """Returns the secret key from the config.""" - return current_app.config.get( - "SPIFFWORKFLOW_BACKEND_OPEN_ID_CLIENT_SECRET_KEY", "" - ) + return current_app.config.get("SPIFFWORKFLOW_BACKEND_OPEN_ID_CLIENT_SECRET_KEY", "") @classmethod def open_id_endpoint_for_name(cls, name: str) -> str: @@ -85,10 +81,7 @@ class AuthenticationService: response = requests.get(openid_config_url) AuthenticationService.ENDPOINT_CACHE = response.json() if name not in AuthenticationService.ENDPOINT_CACHE: - raise Exception( - f"Unknown OpenID Endpoint: {name}. Tried to get from" - f" {openid_config_url}" - ) + raise Exception(f"Unknown OpenID Endpoint: {name}. 
Tried to get from {openid_config_url}") return AuthenticationService.ENDPOINT_CACHE.get(name, "") @staticmethod @@ -114,9 +107,7 @@ class AuthenticationService: state = base64.b64encode(bytes(str({"redirect_url": redirect_url}), "UTF-8")) return state - def get_login_redirect_url( - self, state: str, redirect_url: str = "/v1.0/login_return" - ) -> str: + def get_login_redirect_url(self, state: str, redirect_url: str = "/v1.0/login_return") -> str: """Get_login_redirect_url.""" return_redirect_url = f"{self.get_backend_url()}{redirect_url}" login_redirect_url = ( @@ -129,9 +120,7 @@ class AuthenticationService: ) return login_redirect_url - def get_auth_token_object( - self, code: str, redirect_url: str = "/v1.0/login_return" - ) -> dict: + def get_auth_token_object(self, code: str, redirect_url: str = "/v1.0/login_return") -> dict: """Get_auth_token_object.""" backend_basic_auth_string = f"{self.client_id()}:{self.secret_key()}" backend_basic_auth_bytes = bytes(backend_basic_auth_string, encoding="ascii") @@ -174,9 +163,7 @@ class AuthenticationService: audience_array_in_token = aud if isinstance(aud, str): audience_array_in_token = [aud] - overlapping_aud_values = [ - x for x in audience_array_in_token if x in valid_audience_values - ] + overlapping_aud_values = [x for x in audience_array_in_token if x in valid_audience_values] if iss != cls.server_url(): valid = False @@ -211,15 +198,11 @@ class AuthenticationService: @staticmethod def store_refresh_token(user_id: int, refresh_token: str) -> None: """Store_refresh_token.""" - refresh_token_model = RefreshTokenModel.query.filter( - RefreshTokenModel.user_id == user_id - ).first() + refresh_token_model = RefreshTokenModel.query.filter(RefreshTokenModel.user_id == user_id).first() if refresh_token_model: refresh_token_model.token = refresh_token else: - refresh_token_model = RefreshTokenModel( - user_id=user_id, token=refresh_token - ) + refresh_token_model = RefreshTokenModel(user_id=user_id, token=refresh_token) db.session.add(refresh_token_model) try: db.session.commit() diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py index 1e7c3ee9..f01a574f 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py @@ -108,9 +108,7 @@ class AuthorizationService: ) received_sign = auth_header.split("sha256=")[-1].strip() - secret = current_app.config[ - "SPIFFWORKFLOW_BACKEND_GITHUB_WEBHOOK_SECRET" - ].encode() + secret = current_app.config["SPIFFWORKFLOW_BACKEND_GITHUB_WEBHOOK_SECRET"].encode() expected_sign = HMAC(key=secret, msg=request.data, digestmod=sha256).hexdigest() if not compare_digest(received_sign, expected_sign): raise TokenInvalidError( @@ -118,17 +116,13 @@ class AuthorizationService: ) @classmethod - def has_permission( - cls, principals: list[PrincipalModel], permission: str, target_uri: str - ) -> bool: + def has_permission(cls, principals: list[PrincipalModel], permission: str, target_uri: str) -> bool: """Has_permission.""" principal_ids = [p.id for p in principals] target_uri_normalized = target_uri.removeprefix(V1_API_PATH_PREFIX) permission_assignments = ( - PermissionAssignmentModel.query.filter( - PermissionAssignmentModel.principal_id.in_(principal_ids) - ) + PermissionAssignmentModel.query.filter(PermissionAssignmentModel.principal_id.in_(principal_ids)) 
.filter_by(permission=permission) .join(PermissionTargetModel) .filter( @@ -136,10 +130,7 @@ class AuthorizationService: text(f"'{target_uri_normalized}' LIKE permission_target.uri"), # to check for exact matches as well # see test_user_can_access_base_path_when_given_wildcard_permission unit test - text( - f"'{target_uri_normalized}' =" - " replace(replace(permission_target.uri, '/%', ''), ':%', '')" - ), + text(f"'{target_uri_normalized}' = replace(replace(permission_target.uri, '/%', ''), ':%', '')"), ) ) .all() @@ -150,29 +141,21 @@ class AuthorizationService: elif permission_assignment.grant_type == "deny": return False else: - raise Exception( - f"Unknown grant type: {permission_assignment.grant_type}" - ) + raise Exception(f"Unknown grant type: {permission_assignment.grant_type}") return False @classmethod - def user_has_permission( - cls, user: UserModel, permission: str, target_uri: str - ) -> bool: + def user_has_permission(cls, user: UserModel, permission: str, target_uri: str) -> bool: """User_has_permission.""" if user.principal is None: - raise MissingPrincipalError( - f"Missing principal for user with id: {user.id}" - ) + raise MissingPrincipalError(f"Missing principal for user with id: {user.id}") principals = [user.principal] for group in user.groups: if group.principal is None: - raise MissingPrincipalError( - f"Missing principal for group with id: {group.id}" - ) + raise MissingPrincipalError(f"Missing principal for group with id: {group.id}") principals.append(group.principal) return cls.has_permission(principals, permission, target_uri) @@ -191,26 +174,19 @@ class AuthorizationService: @classmethod def associate_user_with_group(cls, user: UserModel, group: GroupModel) -> None: """Associate_user_with_group.""" - user_group_assignemnt = UserGroupAssignmentModel.query.filter_by( - user_id=user.id, group_id=group.id - ).first() + user_group_assignemnt = UserGroupAssignmentModel.query.filter_by(user_id=user.id, group_id=group.id).first() if user_group_assignemnt is None: - user_group_assignemnt = UserGroupAssignmentModel( - user_id=user.id, group_id=group.id - ) + user_group_assignemnt = UserGroupAssignmentModel(user_id=user.id, group_id=group.id) db.session.add(user_group_assignemnt) db.session.commit() @classmethod - def import_permissions_from_yaml_file( - cls, raise_if_missing_user: bool = False - ) -> DesiredPermissionDict: + def import_permissions_from_yaml_file(cls, raise_if_missing_user: bool = False) -> DesiredPermissionDict: """Import_permissions_from_yaml_file.""" if current_app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"] is None: raise ( PermissionsFileNotSetError( - "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME needs to be set in" - " order to import permissions" + "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME needs to be set in order to import permissions" ) ) @@ -234,11 +210,7 @@ class AuthorizationService: user = UserModel.query.filter_by(username=username).first() if user is None: if raise_if_missing_user: - raise ( - UserNotFoundError( - f"Could not find a user with name: {username}" - ) - ) + raise (UserNotFoundError(f"Could not find a user with name: {username}")) continue user_to_group_dict: UserToGroupDict = { "username": user.username, @@ -249,9 +221,7 @@ class AuthorizationService: permission_assignments = [] if "permissions" in permission_configs: - for _permission_identifier, permission_config in permission_configs[ - "permissions" - ].items(): + for _permission_identifier, permission_config in permission_configs["permissions"].items(): uri 
= permission_config["uri"] permission_target = cls.find_or_create_permission_target(uri) @@ -272,9 +242,7 @@ class AuthorizationService: user = UserModel.query.filter_by(username=username).first() if user is not None: principal = ( - PrincipalModel.query.join(UserModel) - .filter(UserModel.username == username) - .first() + PrincipalModel.query.join(UserModel).filter(UserModel.username == username).first() ) permission_assignments.append( cls.create_permission_for_principal( @@ -297,9 +265,9 @@ class AuthorizationService: """Find_or_create_permission_target.""" uri_with_percent = re.sub(r"\*", "%", uri) target_uri_normalized = uri_with_percent.removeprefix(V1_API_PATH_PREFIX) - permission_target: Optional[PermissionTargetModel] = ( - PermissionTargetModel.query.filter_by(uri=target_uri_normalized).first() - ) + permission_target: Optional[PermissionTargetModel] = PermissionTargetModel.query.filter_by( + uri=target_uri_normalized + ).first() if permission_target is None: permission_target = PermissionTargetModel(uri=target_uri_normalized) db.session.add(permission_target) @@ -314,13 +282,11 @@ class AuthorizationService: permission: str, ) -> PermissionAssignmentModel: """Create_permission_for_principal.""" - permission_assignment: Optional[PermissionAssignmentModel] = ( - PermissionAssignmentModel.query.filter_by( - principal_id=principal.id, - permission_target_id=permission_target.id, - permission=permission, - ).first() - ) + permission_assignment: Optional[PermissionAssignmentModel] = PermissionAssignmentModel.query.filter_by( + principal_id=principal.id, + permission_target_id=permission_target.id, + permission=permission, + ).first() if permission_assignment is None: permission_assignment = PermissionAssignmentModel( principal_id=principal.id, @@ -400,10 +366,7 @@ class AuthorizationService: ) api_view_function = current_app.view_functions[request.endpoint] - if ( - api_view_function - and api_view_function.__name__ in authorization_exclusion_list - ): + if api_view_function and api_view_function.__name__ in authorization_exclusion_list: return None permission_string = cls.get_permission_from_http_method(request.method) @@ -443,10 +406,7 @@ class AuthorizationService: ) from exception except jwt.InvalidTokenError as exception: raise TokenInvalidError( - ( - "The Authentication token you provided is invalid. You need a new" - " token. " - ), + "The Authentication token you provided is invalid. You need a new token. ", ) from exception @staticmethod @@ -506,9 +466,7 @@ class AuthorizationService: ): if tenant_specific_field in user_info: field_number = field_index + 1 - user_attributes[f"tenant_specific_field_{field_number}"] = user_info[ - tenant_specific_field - ] + user_attributes[f"tenant_specific_field_{field_number}"] = user_info[tenant_specific_field] # example value for service: http://localhost:7002/realms/spiffworkflow (keycloak url) user_model = ( @@ -567,9 +525,7 @@ class AuthorizationService: # 2. view the logs for these instances. if permission_set == "start": target_uri = f"/process-instances/{process_related_path_segment}" - permissions_to_assign.append( - PermissionToAssign(permission="create", target_uri=target_uri) - ) + permissions_to_assign.append(PermissionToAssign(permission="create", target_uri=target_uri)) # giving people access to all logs for an instance actually gives them a little bit more access # than would be optimal. 
ideally, you would only be able to view the logs for instances that you started @@ -586,28 +542,18 @@ class AuthorizationService: f"/logs/{process_related_path_segment}", f"/process-data-file-download/{process_related_path_segment}", ]: - permissions_to_assign.append( - PermissionToAssign(permission="read", target_uri=target_uri) - ) + permissions_to_assign.append(PermissionToAssign(permission="read", target_uri=target_uri)) else: if permission_set == "all": for path_segment_dict in PATH_SEGMENTS_FOR_PERMISSION_ALL: - target_uri = ( - f"{path_segment_dict['path']}/{process_related_path_segment}" - ) + target_uri = f"{path_segment_dict['path']}/{process_related_path_segment}" relevant_permissions = path_segment_dict["relevant_permissions"] for permission in relevant_permissions: - permissions_to_assign.append( - PermissionToAssign( - permission=permission, target_uri=target_uri - ) - ) + permissions_to_assign.append(PermissionToAssign(permission=permission, target_uri=target_uri)) for target_uri in target_uris: for permission in permissions: - permissions_to_assign.append( - PermissionToAssign(permission=permission, target_uri=target_uri) - ) + permissions_to_assign.append(PermissionToAssign(permission=permission, target_uri=target_uri)) return permissions_to_assign @@ -615,48 +561,26 @@ class AuthorizationService: def set_basic_permissions(cls) -> list[PermissionToAssign]: """Set_basic_permissions.""" permissions_to_assign: list[PermissionToAssign] = [] + permissions_to_assign.append(PermissionToAssign(permission="read", target_uri="/process-instances/for-me")) + permissions_to_assign.append(PermissionToAssign(permission="read", target_uri="/processes")) + permissions_to_assign.append(PermissionToAssign(permission="read", target_uri="/service-tasks")) + permissions_to_assign.append(PermissionToAssign(permission="read", target_uri="/user-groups/for-current-user")) permissions_to_assign.append( - PermissionToAssign( - permission="read", target_uri="/process-instances/for-me" - ) - ) - permissions_to_assign.append( - PermissionToAssign(permission="read", target_uri="/processes") - ) - permissions_to_assign.append( - PermissionToAssign(permission="read", target_uri="/service-tasks") - ) - permissions_to_assign.append( - PermissionToAssign( - permission="read", target_uri="/user-groups/for-current-user" - ) - ) - permissions_to_assign.append( - PermissionToAssign( - permission="read", target_uri="/process-instances/find-by-id/*" - ) + PermissionToAssign(permission="read", target_uri="/process-instances/find-by-id/*") ) for permission in ["create", "read", "update", "delete"]: permissions_to_assign.append( - PermissionToAssign( - permission=permission, target_uri="/process-instances/reports/*" - ) - ) - permissions_to_assign.append( - PermissionToAssign(permission=permission, target_uri="/tasks/*") + PermissionToAssign(permission=permission, target_uri="/process-instances/reports/*") ) + permissions_to_assign.append(PermissionToAssign(permission=permission, target_uri="/tasks/*")) return permissions_to_assign @classmethod - def set_process_group_permissions( - cls, target: str, permission_set: str - ) -> list[PermissionToAssign]: + def set_process_group_permissions(cls, target: str, permission_set: str) -> list[PermissionToAssign]: """Set_process_group_permissions.""" permissions_to_assign: list[PermissionToAssign] = [] - process_group_identifier = ( - target.removeprefix("PG:").replace("/", ":").removeprefix(":") - ) + process_group_identifier = target.removeprefix("PG:").replace("/", 
":").removeprefix(":") process_related_path_segment = f"{process_group_identifier}:*" if process_group_identifier == "ALL": process_related_path_segment = "*" @@ -670,14 +594,10 @@ class AuthorizationService: return permissions_to_assign @classmethod - def set_process_model_permissions( - cls, target: str, permission_set: str - ) -> list[PermissionToAssign]: + def set_process_model_permissions(cls, target: str, permission_set: str) -> list[PermissionToAssign]: """Set_process_model_permissions.""" permissions_to_assign: list[PermissionToAssign] = [] - process_model_identifier = ( - target.removeprefix("PM:").replace("/", ":").removeprefix(":") - ) + process_model_identifier = target.removeprefix("PM:").replace("/", ":").removeprefix(":") process_related_path_segment = f"{process_model_identifier}/*" if process_model_identifier == "ALL": @@ -690,9 +610,7 @@ class AuthorizationService: return permissions_to_assign @classmethod - def explode_permissions( - cls, permission_set: str, target: str - ) -> list[PermissionToAssign]: + def explode_permissions(cls, permission_set: str, target: str) -> list[PermissionToAssign]: """Explodes given permissions to and returns list of PermissionToAssign objects. These can be used to then iterate through and inserted into the database. @@ -719,30 +637,20 @@ class AuthorizationService: permissions = ["create", "read", "update", "delete"] if target.startswith("PG:"): - permissions_to_assign += cls.set_process_group_permissions( - target, permission_set - ) + permissions_to_assign += cls.set_process_group_permissions(target, permission_set) elif target.startswith("PM:"): - permissions_to_assign += cls.set_process_model_permissions( - target, permission_set - ) + permissions_to_assign += cls.set_process_model_permissions(target, permission_set) elif permission_set == "start": - raise InvalidPermissionError( - "Permission 'start' is only available for macros PM and PG." 
- ) + raise InvalidPermissionError("Permission 'start' is only available for macros PM and PG.") elif target.startswith("BASIC"): permissions_to_assign += cls.set_basic_permissions() elif target == "ALL": for permission in permissions: - permissions_to_assign.append( - PermissionToAssign(permission=permission, target_uri="/*") - ) + permissions_to_assign.append(PermissionToAssign(permission=permission, target_uri="/*")) elif target.startswith("/"): for permission in permissions: - permissions_to_assign.append( - PermissionToAssign(permission=permission, target_uri=target) - ) + permissions_to_assign.append(PermissionToAssign(permission=permission, target_uri=target)) else: raise InvalidPermissionError( f"Target uri '{target}' with permission set '{permission_set}' is" @@ -761,9 +669,7 @@ class AuthorizationService: permissions_to_assign = cls.explode_permissions(permission, target) permission_assignments = [] for permission_to_assign in permissions_to_assign: - permission_target = cls.find_or_create_permission_target( - permission_to_assign.target_uri - ) + permission_target = cls.find_or_create_permission_target(permission_to_assign.target_uri) permission_assignments.append( cls.create_permission_for_principal( group.principal, permission_target, permission_to_assign.permission @@ -789,9 +695,7 @@ class AuthorizationService: "group_identifier": group_identifier, } desired_user_to_group_identifiers.append(user_to_group_dict) - GroupService.add_user_to_group_or_add_to_waiting( - username, group_identifier - ) + GroupService.add_user_to_group_or_add_to_waiting(username, group_identifier) desired_group_identifiers.add(group_identifier) for permission in group["permissions"]: for crud_op in permission["actions"]: @@ -812,8 +716,7 @@ class AuthorizationService: # do not remove users from the default user group if ( current_app.config["SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP"] is None - or current_app.config["SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP"] - != iutga.group.identifier + or current_app.config["SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP"] != iutga.group.identifier ): current_user_dict: UserToGroupDict = { "username": iutga.user.username, @@ -823,12 +726,8 @@ class AuthorizationService: db.session.delete(iutga) # do not remove the default user group - desired_group_identifiers.add( - current_app.config["SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP"] - ) - groups_to_delete = GroupModel.query.filter( - GroupModel.identifier.not_in(desired_group_identifiers) - ).all() + desired_group_identifiers.add(current_app.config["SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP"]) + groups_to_delete = GroupModel.query.filter(GroupModel.identifier.not_in(desired_group_identifiers)).all() for gtd in groups_to_delete: db.session.delete(gtd) db.session.commit() diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/background_processing_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/background_processing_service.py index 3ce0e8f2..eabff0f9 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/background_processing_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/background_processing_service.py @@ -28,9 +28,7 @@ class BackgroundProcessingService: """Since this runs in a scheduler, we need to specify the app context as well.""" with self.app.app_context(): ProcessInstanceLockService.set_thread_local_locking_context("bg:userinput") - ProcessInstanceService.do_waiting( - ProcessInstanceStatus.user_input_required.value - ) + 
ProcessInstanceService.do_waiting(ProcessInstanceStatus.user_input_required.value) def process_message_instances_with_app_context(self) -> None: """Since this runs in a scheduler, we need to specify the app context as well.""" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/error_handling_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/error_handling_service.py index 53ceef2c..4407c6db 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/error_handling_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/error_handling_service.py @@ -25,22 +25,14 @@ class ErrorHandlingService: @staticmethod def set_instance_status(instance_id: int, status: str) -> None: """Set_instance_status.""" - instance = ( - db.session.query(ProcessInstanceModel) - .filter(ProcessInstanceModel.id == instance_id) - .first() - ) + instance = db.session.query(ProcessInstanceModel).filter(ProcessInstanceModel.id == instance_id).first() if instance: instance.status = status db.session.commit() - def handle_error( - self, _processor: ProcessInstanceProcessor, _error: Union[ApiError, Exception] - ) -> None: + def handle_error(self, _processor: ProcessInstanceProcessor, _error: Union[ApiError, Exception]) -> None: """On unhandled exceptions, set instance.status based on model.fault_or_suspend_on_exception.""" - process_model = ProcessModelService.get_process_model( - _processor.process_model_identifier - ) + process_model = ProcessModelService.get_process_model(_processor.process_model_identifier) # First, suspend or fault the instance if process_model.fault_or_suspend_on_exception == "suspend": self.set_instance_status( @@ -72,8 +64,7 @@ class ErrorHandlingService: ) -> None: """Send a BPMN Message - which may kick off a waiting process.""" message_text = ( - f"There was an exception running process {process_model.id}.\nOriginal" - f" Error:\n{error.__repr__()}" + f"There was an exception running process {process_model.id}.\nOriginal Error:\n{error.__repr__()}" ) message_payload = { "message_text": message_text, diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/file_system_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/file_system_service.py index 92dcb9bb..5cad69ad 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/file_system_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/file_system_service.py @@ -84,17 +84,13 @@ class FileSystemService: @staticmethod def workflow_path(spec: ProcessModelInfo) -> str: """Workflow_path.""" - process_model_path = os.path.join( - FileSystemService.root_path(), spec.id_for_file_path() - ) + process_model_path = os.path.join(FileSystemService.root_path(), spec.id_for_file_path()) return process_model_path @staticmethod def full_path_to_process_model_file(spec: ProcessModelInfo) -> str: """Full_path_to_process_model_file.""" - return os.path.join( - FileSystemService.workflow_path(spec), spec.primary_file_name # type: ignore - ) + return os.path.join(FileSystemService.workflow_path(spec), spec.primary_file_name) # type: ignore def next_display_order(self, spec: ProcessModelInfo) -> int: """Next_display_order.""" @@ -124,8 +120,7 @@ class FileSystemService: if file_extension not in FileType.list(): raise ApiError( "unknown_extension", - "The file you provided does not have an accepted extension:" - + file_extension, + "The file you provided does not have an accepted extension:" + file_extension, status_code=404, ) @@ -173,9 
+168,7 @@ class FileSystemService: content_type = CONTENT_TYPES[file_type.name] last_modified = FileSystemService._last_modified(file_path) size = os.path.getsize(file_path) - file = File.from_file_system( - file_name, file_type, content_type, last_modified, size - ) + file = File.from_file_system(file_name, file_type, content_type, last_modified, size) return file @staticmethod @@ -193,6 +186,4 @@ class FileSystemService: stats = item.stat() file_size = stats.st_size last_modified = FileSystemService._last_modified(item.path) - return File.from_file_system( - item.name, file_type, content_type, last_modified, file_size - ) + return File.from_file_system(item.name, file_type, content_type, last_modified, file_size) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/git_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/git_service.py index 6ae385ad..a2c7d216 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/git_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/git_service.py @@ -37,14 +37,10 @@ class GitService: @classmethod def get_current_revision(cls) -> str: """Get_current_revision.""" - bpmn_spec_absolute_dir = current_app.config[ - "SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR" - ] + bpmn_spec_absolute_dir = current_app.config["SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"] # The value includes a carriage return character at the end, so we don't grab the last character with FileSystemService.cd(bpmn_spec_absolute_dir): - return cls.run_shell_command_to_get_stdout( - ["git", "rev-parse", "--short", "HEAD"] - ) + return cls.run_shell_command_to_get_stdout(["git", "rev-parse", "--short", "HEAD"]) @classmethod def get_instance_file_contents_for_revision( @@ -54,12 +50,8 @@ class GitService: file_name: Optional[str] = None, ) -> str: """Get_instance_file_contents_for_revision.""" - bpmn_spec_absolute_dir = current_app.config[ - "SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR" - ] - process_model_relative_path = FileSystemService.process_model_relative_path( - process_model - ) + bpmn_spec_absolute_dir = current_app.config["SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"] + process_model_relative_path = FileSystemService.process_model_relative_path(process_model) file_name_to_use = file_name if file_name_to_use is None: file_name_to_use = process_model.primary_file_name @@ -82,22 +74,14 @@ class GitService: cls.check_for_basic_configs() branch_name_to_use = branch_name if branch_name_to_use is None: - branch_name_to_use = current_app.config[ - "SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH" - ] + branch_name_to_use = current_app.config["SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH"] repo_path_to_use = repo_path if repo_path is None: - repo_path_to_use = current_app.config[ - "SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR" - ] + repo_path_to_use = current_app.config["SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"] if repo_path_to_use is None: - raise ConfigurationError( - "SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR config must be set" - ) + raise ConfigurationError("SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR config must be set") - shell_command_path = os.path.join( - current_app.root_path, "..", "..", "bin", "git_commit_bpmn_models_repo" - ) + shell_command_path = os.path.join(current_app.root_path, "..", "..", "bin", "git_commit_bpmn_models_repo") shell_command = [ shell_command_path, repo_path_to_use, @@ -119,10 +103,7 @@ class GitService: def check_for_publish_configs(cls) -> None: """Check_for_configs.""" 
cls.check_for_basic_configs() - if ( - current_app.config["SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH"] - is None - ): + if current_app.config["SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH"] is None: raise MissingGitConfigsError( "Missing config for SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH. " "This is required for publishing process models" @@ -155,29 +136,21 @@ class GitService: ) -> Union[subprocess.CompletedProcess[bytes], bool]: """Run_shell_command.""" my_env = os.environ.copy() - my_env["GIT_COMMITTER_NAME"] = ( - current_app.config.get("SPIFFWORKFLOW_BACKEND_GIT_USERNAME") or "unknown" - ) + my_env["GIT_COMMITTER_NAME"] = current_app.config.get("SPIFFWORKFLOW_BACKEND_GIT_USERNAME") or "unknown" my_env["GIT_COMMITTER_EMAIL"] = ( - current_app.config.get("SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL") - or "unknown@example.org" + current_app.config.get("SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL") or "unknown@example.org" ) # SSH authentication can be also provided via gitconfig. - ssh_key_path = current_app.config.get( - "SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY_PATH" - ) + ssh_key_path = current_app.config.get("SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY_PATH") if ssh_key_path is not None: my_env["GIT_SSH_COMMAND"] = ( - "ssh -F /dev/null -o UserKnownHostsFile=/dev/null -o" - " StrictHostKeyChecking=no -i %s" % ssh_key_path + "ssh -F /dev/null -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i %s" % ssh_key_path ) # this is fine since we pass the commands directly - result = subprocess.run( # noqa - command, check=False, capture_output=True, env=my_env - ) + result = subprocess.run(command, check=False, capture_output=True, env=my_env) # noqa if return_success_state: return result.returncode == 0 @@ -185,11 +158,7 @@ class GitService: if result.returncode != 0: stdout = result.stdout.decode("utf-8") stderr = result.stderr.decode("utf-8") - raise GitCommandError( - f"Failed to execute git command: {command}" - f"Stdout: {stdout}" - f"Stderr: {stderr}" - ) + raise GitCommandError(f"Failed to execute git command: {command}Stdout: {stdout}Stderr: {stderr}") return result @@ -201,19 +170,16 @@ class GitService: if "repository" not in webhook or "clone_url" not in webhook["repository"]: raise InvalidGitWebhookBodyError( - "Cannot find required keys of 'repository:clone_url' from webhook" - f" body: {webhook}" + f"Cannot find required keys of 'repository:clone_url' from webhook body: {webhook}" ) - config_clone_url = current_app.config[ - "SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL" - ] + config_clone_url = current_app.config["SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL"] repo = webhook["repository"] valid_clone_urls = [repo["clone_url"], repo["git_url"], repo["ssh_url"]] if config_clone_url not in valid_clone_urls: raise GitCloneUrlMismatchError( - "Configured clone url does not match the repo URLs from webhook: %s" - " =/= %s" % (config_clone_url, valid_clone_urls) + "Configured clone url does not match the repo URLs from webhook: %s =/= %s" + % (config_clone_url, valid_clone_urls) ) # Test webhook requests have a zen koan and hook info. 
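Taken together, the webhook hunks in authorization_service.py and git_service.py implement one flow: verify the HMAC-SHA256 signature of the raw request body, confirm the payload's repository matches the configured clone URL, skip GitHub's test ping, ignore pushes to other branches, then rebase-pull the BPMN spec directory. The following is a minimal standalone sketch of that flow under those assumptions; handle_github_webhook and its parameter names are illustrative, not names from this codebase.

import hmac
import subprocess
from hashlib import sha256

def handle_github_webhook(body: bytes, signature_header: str, webhook: dict, secret: bytes,
                          configured_clone_url: str, source_branch: str, repo_dir: str) -> bool:
    # Verify the "sha256=..." signature header against an HMAC of the raw request body.
    received_sign = signature_header.split("sha256=")[-1].strip()
    expected_sign = hmac.new(key=secret, msg=body, digestmod=sha256).hexdigest()
    if not hmac.compare_digest(received_sign, expected_sign):
        raise ValueError("Webhook signature is invalid")
    # The payload must reference the configured repository, by any of its clone URLs.
    repo = webhook["repository"]
    if configured_clone_url not in [repo["clone_url"], repo["git_url"], repo["ssh_url"]]:
        raise ValueError("Configured clone url does not match the repo URLs from webhook")
    # GitHub's test ping carries a zen koan and no push ref; nothing to pull in that case.
    if "zen" in webhook:
        return False
    # Only pushes to the configured source branch trigger a pull.
    if webhook.get("ref") != f"refs/heads/{source_branch}":
        return False
    subprocess.run(["git", "pull", "--rebase"], cwd=repo_dir, check=True)
    return True

Splitting the check across two services matches the diff above: the signature check guards the endpoint generically, while clone-URL and branch validation stay with the git-specific logic.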
@@ -221,9 +187,7 @@ class GitService: return False if "ref" not in webhook: - raise InvalidGitWebhookBodyError( - f"Could not find the 'ref' arg in the webhook boy: {webhook}" - ) + raise InvalidGitWebhookBodyError(f"Could not find the 'ref' arg in the webhook body: {webhook}") if current_app.config["SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH"] is None: raise MissingGitConfigsError( "Missing config for SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH. " "This is required for publishing process models" @@ -236,9 +200,7 @@ class GitService: if ref != f"refs/heads/{git_branch}": return False - with FileSystemService.cd( - current_app.config["SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"] - ): + with FileSystemService.cd(current_app.config["SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"]): cls.run_shell_command(["git", "pull", "--rebase"]) return True @@ -247,9 +209,7 @@ class GitService: """Publish.""" cls.check_for_publish_configs() source_process_model_root = FileSystemService.root_path() - source_process_model_path = os.path.join( - source_process_model_root, process_model_id - ) + source_process_model_path = os.path.join(source_process_model_root, process_model_id) unique_hex = uuid.uuid4().hex clone_dir = f"sample-process-models.{unique_hex}" @@ -257,9 +217,7 @@ class GitService: # we are adding a guid to this so the flake8 issue has been mitigated destination_process_root = f"/tmp/{clone_dir}" # noqa - git_clone_url = current_app.config[ - "SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL" - ] + git_clone_url = current_app.config["SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL"] cmd = ["git", "clone", git_clone_url, destination_process_root] cls.run_shell_command(cmd) @@ -281,9 +239,7 @@ class GitService: cls.run_shell_command(["git", "checkout", "-b", branch_to_pull_request]) # copy files from process model into the new publish branch - destination_process_model_path = os.path.join( - destination_process_root, process_model_id - ) + destination_process_model_path = os.path.join(destination_process_root, process_model_id) if os.path.exists(destination_process_model_path): shutil.rmtree(destination_process_model_path) shutil.copytree(source_process_model_path, destination_process_model_path) @@ -296,9 +252,7 @@ class GitService: cls.commit(commit_message, destination_process_root, branch_to_pull_request) # build url for github page to open PR - git_remote = cls.run_shell_command_to_get_stdout( - ["git", "config", "--get", "remote.origin.url"] - ) + git_remote = cls.run_shell_command_to_get_stdout(["git", "config", "--get", "remote.origin.url"]) remote_url = git_remote.strip().replace(".git", "") pr_url = f"{remote_url}/compare/{branch_to_update}...{branch_to_pull_request}?expand=1" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/group_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/group_service.py index abc11151..eee47bc6 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/group_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/group_service.py @@ -13,9 +13,7 @@ class GroupService: @classmethod def find_or_create_group(cls, group_identifier: str) -> GroupModel: """Find_or_create_group.""" - group: Optional[GroupModel] = GroupModel.query.filter_by( - identifier=group_identifier - ).first() + group: Optional[GroupModel] = GroupModel.query.filter_by(identifier=group_identifier).first() if group is None: group = GroupModel(identifier=group_identifier) db.session.add(group) @@ -24,9 +22,7 @@ class GroupService: return group @classmethod - def add_user_to_group_or_add_to_waiting( - cls, username: str, group_identifier: str - ) -> None: +
def add_user_to_group_or_add_to_waiting(cls, username: str, group_identifier: str) -> None: """Add_user_to_group_or_add_to_waiting.""" group = cls.find_or_create_group(group_identifier) user = UserModel.query.filter_by(username=username).first() diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py index 77adeaf3..e2f58e29 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py @@ -63,10 +63,7 @@ class JsonFormatter(logging.Formatter): KeyError is raised if an unknown attribute is provided in the fmt_dict. """ - return { - fmt_key: record.__dict__[fmt_val] - for fmt_key, fmt_val in self.fmt_dict.items() - } + return {fmt_key: record.__dict__[fmt_val] for fmt_key, fmt_val in self.fmt_dict.items()} def format(self, record: logging.LogRecord) -> str: """Mostly the same as the parent's class method. @@ -124,15 +121,12 @@ def setup_logger(app: Flask) -> None: if upper_log_level_string not in log_levels: raise InvalidLogLevelError( - f"Log level given is invalid: '{upper_log_level_string}'. Valid options are" - f" {log_levels}" + f"Log level given is invalid: '{upper_log_level_string}'. Valid options are {log_levels}" ) log_level = getattr(logging, upper_log_level_string) spiff_log_level = getattr(logging, upper_log_level_string) - log_formatter = logging.Formatter( - "%(asctime)s - %(name)s - %(levelname)s - %(message)s" - ) + log_formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s") app.logger.debug("Printing log to create app logger") @@ -235,9 +229,7 @@ class DBHandler(logging.Handler): message = record.msg if hasattr(record, "msg") else None current_user_id = None - if bpmn_task_type in Task.HUMAN_TASK_TYPES and hasattr( - record, "current_user_id" - ): + if bpmn_task_type in Task.HUMAN_TASK_TYPES and hasattr(record, "current_user_id"): current_user_id = record.current_user_id # type: ignore spiff_step = ( diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/message_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/message_service.py index d260f53d..7e52561e 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/message_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/message_service.py @@ -28,9 +28,7 @@ class MessageService: """MessageService.""" @classmethod - def correlate_send_message( - cls, message_instance_send: MessageInstanceModel - ) -> Optional[MessageInstanceModel]: + def correlate_send_message(cls, message_instance_send: MessageInstanceModel) -> Optional[MessageInstanceModel]: """Connects the given send message to a 'receive' message if possible. 
:param message_instance_send: @@ -52,18 +50,14 @@ class MessageService: message_instance_receive: Optional[MessageInstanceModel] = None try: for message_instance in available_receive_messages: - if message_instance.correlates( - message_instance_send, CustomBpmnScriptEngine() - ): + if message_instance.correlates(message_instance_send, CustomBpmnScriptEngine()): message_instance_receive = message_instance if message_instance_receive is None: # Check for a message triggerable process and start that to create a new message_instance_receive - message_triggerable_process_model = ( - MessageTriggerableProcessModel.query.filter_by( - message_name=message_instance_send.name - ).first() - ) + message_triggerable_process_model = MessageTriggerableProcessModel.query.filter_by( + message_name=message_instance_send.name + ).first() if message_triggerable_process_model: receiving_process = MessageService.start_process_with_message( message_triggerable_process_model, message_instance_send @@ -74,17 +68,10 @@ class MessageService: status="ready", ).first() else: - receiving_process = ( - MessageService.get_process_instance_for_message_instance( - message_instance_receive - ) - ) + receiving_process = MessageService.get_process_instance_for_message_instance(message_instance_receive) # Assure we can send the message, otherwise keep going. - if ( - message_instance_receive is None - or not receiving_process.can_receive_message() - ): + if message_instance_receive is None or not receiving_process.can_receive_message(): message_instance_send.status = "ready" message_instance_send.status = "ready" db.session.add(message_instance_send) @@ -124,9 +111,7 @@ class MessageService: @classmethod def correlate_all_message_instances(cls) -> None: """Look at ALL the Send and Receive Messages and attempt to find correlations.""" - message_instances_send = MessageInstanceModel.query.filter_by( - message_type="send", status="ready" - ).all() + message_instances_send = MessageInstanceModel.query.filter_by(message_type="send", status="ready").all() for message_instance_send in message_instances_send: cls.correlate_send_message(message_instance_send) @@ -150,11 +135,9 @@ class MessageService: message_instance_receive: MessageInstanceModel, ) -> ProcessInstanceModel: """Process_message_receive.""" - process_instance_receive: ProcessInstanceModel = ( - ProcessInstanceModel.query.filter_by( - id=message_instance_receive.process_instance_id - ).first() - ) + process_instance_receive: ProcessInstanceModel = ProcessInstanceModel.query.filter_by( + id=message_instance_receive.process_instance_id + ).first() if process_instance_receive is None: raise MessageServiceError( ( @@ -176,9 +159,7 @@ class MessageService: ) -> None: """process_message_receive.""" processor_receive = ProcessInstanceProcessor(process_instance_receive) - processor_receive.bpmn_process_instance.catch_bpmn_message( - message_model_name, message_payload - ) + processor_receive.bpmn_process_instance.catch_bpmn_message(message_model_name, message_payload) processor_receive.do_engine_steps(save=True) message_instance_receive.status = MessageStatuses.completed.value db.session.add(message_instance_receive) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_lock_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_lock_service.py index 5c3cd935..866c073b 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_lock_service.py +++ 
b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_lock_service.py @@ -35,9 +35,7 @@ class ProcessInstanceLockService: return f"{ctx['domain']}:{ctx['uuid']}:{ctx['thread_id']}" @classmethod - def lock( - cls, process_instance_id: int, queue_entry: ProcessInstanceQueueModel - ) -> None: + def lock(cls, process_instance_id: int, queue_entry: ProcessInstanceQueueModel) -> None: ctx = cls.get_thread_local_locking_context() ctx["locks"][process_instance_id] = queue_entry @@ -55,9 +53,7 @@ class ProcessInstanceLockService: return ctx["locks"].pop(process_instance_id) # type: ignore @classmethod - def try_unlock( - cls, process_instance_id: int - ) -> Optional[ProcessInstanceQueueModel]: + def try_unlock(cls, process_instance_id: int) -> Optional[ProcessInstanceQueueModel]: ctx = cls.get_thread_local_locking_context() return ctx["locks"].pop(process_instance_id, None) # type: ignore diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 89cea4ae..6c8b64fc 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -171,9 +171,7 @@ class BoxedTaskDataBasedScriptEngineEnvironment(BoxedTaskDataEnvironment): # ty super().execute(script, context, external_methods) self._last_result = context - def user_defined_state( - self, external_methods: Optional[Dict[str, Any]] = None - ) -> Dict[str, Any]: + def user_defined_state(self, external_methods: Optional[Dict[str, Any]] = None) -> Dict[str, Any]: return {} def last_result(self) -> Dict[str, Any]: @@ -201,9 +199,7 @@ class NonTaskDataBasedScriptEngineEnvironment(BasePythonScriptEngineEnvironment) def __init__(self, environment_globals: Dict[str, Any]): """NonTaskDataBasedScriptEngineEnvironment.""" self.state: Dict[str, Any] = {} - self.non_user_defined_keys = set( - [*environment_globals.keys()] + ["__builtins__"] - ) + self.non_user_defined_keys = set([*environment_globals.keys()] + ["__builtins__"]) super().__init__(environment_globals) def evaluate( @@ -249,18 +245,12 @@ class NonTaskDataBasedScriptEngineEnvironment(BasePythonScriptEngineEnvironment) # the state will be removed later once the task is completed. 
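# Merging the environment state into the task-data context at this point is what
# lets later script tasks see variables defined by earlier ones: user state lives
# in the script engine environment rather than in task data, and is only copied
# into the context for the duration of the task.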
context.update(self.state) - def user_defined_state( - self, external_methods: Optional[Dict[str, Any]] = None - ) -> Dict[str, Any]: + def user_defined_state(self, external_methods: Optional[Dict[str, Any]] = None) -> Dict[str, Any]: keys_to_filter = self.non_user_defined_keys if external_methods is not None: keys_to_filter |= set(external_methods.keys()) - return { - k: v - for k, v in self.state.items() - if k not in keys_to_filter and not callable(v) - } + return {k: v for k, v in self.state.items() if k not in keys_to_filter and not callable(v)} def last_result(self) -> Dict[str, Any]: return {k: v for k, v in self.state.items()} @@ -286,9 +276,7 @@ class NonTaskDataBasedScriptEngineEnvironment(BasePythonScriptEngineEnvironment) state_keys_to_remove = state_keys - task_data_keys task_data_keys_to_keep = task_data_keys - state_keys - self.state = { - k: v for k, v in self.state.items() if k not in state_keys_to_remove - } + self.state = {k: v for k, v in self.state.items() if k not in state_keys_to_remove} task.data = {k: v for k, v in task.data.items() if k in task_data_keys_to_keep} if hasattr(task.task_spec, "_result_variable"): @@ -380,20 +368,16 @@ class CustomBpmnScriptEngine(PythonScriptEngine): # type: ignore except Exception as exception: if task is None: raise WorkflowException( - "Error evaluating expression: '%s', %s" - % (expression, str(exception)), + f"Error evaluating expression: '{expression}', {str(exception)}", ) from exception else: raise WorkflowTaskException( - "Error evaluating expression '%s', %s" - % (expression, str(exception)), + f"Error evaluating expression '{expression}', {str(exception)}", task=task, exception=exception, ) from exception - def execute( - self, task: SpiffTask, script: str, external_methods: Any = None - ) -> None: + def execute(self, task: SpiffTask, script: str, external_methods: Any = None) -> None: """Execute.""" try: methods = self.__get_augment_methods(task) @@ -412,14 +396,10 @@ class CustomBpmnScriptEngine(PythonScriptEngine): # type: ignore task_data: Dict[str, Any], ) -> Any: """CallService.""" - return ServiceTaskDelegate.call_connector( - operation_name, operation_params, task_data - ) + return ServiceTaskDelegate.call_connector(operation_name, operation_params, task_data) -IdToBpmnProcessSpecMapping = NewType( - "IdToBpmnProcessSpecMapping", dict[str, BpmnProcessSpec] -) +IdToBpmnProcessSpecMapping = NewType("IdToBpmnProcessSpecMapping", dict[str, BpmnProcessSpec]) class ProcessInstanceProcessor: @@ -428,9 +408,7 @@ class ProcessInstanceProcessor: _script_engine = CustomBpmnScriptEngine() SERIALIZER_VERSION = "1.0-spiffworkflow-backend" - wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter( - SPIFF_SPEC_CONFIG - ) + wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter(SPIFF_SPEC_CONFIG) _serializer = BpmnWorkflowSerializer(wf_spec_converter, version=SERIALIZER_VERSION) _event_serializer = EventBasedGatewayConverter(wf_spec_converter) @@ -440,9 +418,7 @@ class ProcessInstanceProcessor: # __init__ calls these helpers: # * get_spec, which returns a spec and any subprocesses (as IdToBpmnProcessSpecMapping dict) # * __get_bpmn_process_instance, which takes spec and subprocesses and instantiates and returns a BpmnWorkflow - def __init__( - self, process_instance_model: ProcessInstanceModel, validate_only: bool = False - ) -> None: + def __init__(self, process_instance_model: ProcessInstanceModel, validate_only: bool = False) -> None: """Create a Workflow Processor based on the 
serialized information available in the process_instance model.""" tld = current_app.config["THREAD_LOCAL_DATA"] tld.process_instance_id = process_instance_model.id @@ -476,9 +452,7 @@ class ProcessInstanceProcessor: ) self.process_model_identifier = process_instance_model.process_model_identifier - self.process_model_display_name = ( - process_instance_model.process_model_display_name - ) + self.process_model_display_name = process_instance_model.process_model_display_name try: ( @@ -496,10 +470,7 @@ class ProcessInstanceProcessor: except MissingSpecError as ke: raise ApiError( error_code="unexpected_process_instance_structure", - message=( - "Failed to deserialize process_instance" - " '%s' due to a mis-placed or missing task '%s'" - ) + message="Failed to deserialize process_instance '%s' due to a mis-placed or missing task '%s'" % (self.process_model_identifier, str(ke)), ) from ke @@ -508,45 +479,32 @@ class ProcessInstanceProcessor: cls, process_model_identifier: str ) -> Tuple[BpmnProcessSpec, IdToBpmnProcessSpecMapping]: """Get_process_model_and_subprocesses.""" - process_model_info = ProcessModelService.get_process_model( - process_model_identifier - ) + process_model_info = ProcessModelService.get_process_model(process_model_identifier) if process_model_info is None: raise ( ApiError( "process_model_not_found", - ( - "The given process model was not found:" - f" {process_model_identifier}." - ), + f"The given process model was not found: {process_model_identifier}.", ) ) spec_files = SpecFileService.get_files(process_model_info) return cls.get_spec(spec_files, process_model_info) @classmethod - def get_bpmn_process_instance_from_process_model( - cls, process_model_identifier: str - ) -> BpmnWorkflow: + def get_bpmn_process_instance_from_process_model(cls, process_model_identifier: str) -> BpmnWorkflow: """Get_all_bpmn_process_identifiers_for_process_model.""" (bpmn_process_spec, subprocesses) = cls.get_process_model_and_subprocesses( process_model_identifier, ) - return cls.get_bpmn_process_instance_from_workflow_spec( - bpmn_process_spec, subprocesses - ) + return cls.get_bpmn_process_instance_from_workflow_spec(bpmn_process_spec, subprocesses) @staticmethod def set_script_engine(bpmn_process_instance: BpmnWorkflow) -> None: - ProcessInstanceProcessor._script_engine.environment.restore_state( - bpmn_process_instance - ) + ProcessInstanceProcessor._script_engine.environment.restore_state(bpmn_process_instance) bpmn_process_instance.script_engine = ProcessInstanceProcessor._script_engine def preserve_script_engine_state(self) -> None: - ProcessInstanceProcessor._script_engine.environment.preserve_state( - self.bpmn_process_instance - ) + ProcessInstanceProcessor._script_engine.environment.preserve_state(self.bpmn_process_instance) @classmethod def _update_bpmn_definition_mappings( @@ -555,16 +513,11 @@ class ProcessInstanceProcessor: bpmn_process_definition_identifier: str, task_definition: TaskDefinitionModel, ) -> None: - if ( - bpmn_process_definition_identifier - not in bpmn_definition_to_task_definitions_mappings - ): - bpmn_definition_to_task_definitions_mappings[ - bpmn_process_definition_identifier - ] = {} - bpmn_definition_to_task_definitions_mappings[ - bpmn_process_definition_identifier - ][task_definition.bpmn_identifier] = task_definition + if bpmn_process_definition_identifier not in bpmn_definition_to_task_definitions_mappings: + bpmn_definition_to_task_definitions_mappings[bpmn_process_definition_identifier] = {} + 
bpmn_definition_to_task_definitions_mappings[bpmn_process_definition_identifier][ + task_definition.bpmn_identifier + ] = task_definition @classmethod def _get_definition_dict_for_bpmn_process_definition( @@ -608,18 +561,14 @@ class ProcessInstanceProcessor: bpmn_subprocess_definition_bpmn_identifiers = {} for bpmn_subprocess_definition in bpmn_process_subprocess_definitions: - bpmn_process_definition_dict: dict = ( - bpmn_subprocess_definition.properties_json - ) + bpmn_process_definition_dict: dict = bpmn_subprocess_definition.properties_json spiff_bpmn_process_dict["subprocess_specs"][ bpmn_subprocess_definition.bpmn_identifier ] = bpmn_process_definition_dict - spiff_bpmn_process_dict["subprocess_specs"][ + spiff_bpmn_process_dict["subprocess_specs"][bpmn_subprocess_definition.bpmn_identifier]["task_specs"] = {} + bpmn_subprocess_definition_bpmn_identifiers[bpmn_subprocess_definition.id] = ( bpmn_subprocess_definition.bpmn_identifier - ]["task_specs"] = {} - bpmn_subprocess_definition_bpmn_identifiers[ - bpmn_subprocess_definition.id - ] = bpmn_subprocess_definition.bpmn_identifier + ) task_definitions = TaskDefinitionModel.query.filter( TaskDefinitionModel.bpmn_process_definition_id.in_( # type: ignore @@ -627,29 +576,21 @@ class ProcessInstanceProcessor: ) ).all() for task_definition in task_definitions: - bpmn_subprocess_definition_bpmn_identifier = ( - bpmn_subprocess_definition_bpmn_identifiers[ - task_definition.bpmn_process_definition_id - ] - ) + bpmn_subprocess_definition_bpmn_identifier = bpmn_subprocess_definition_bpmn_identifiers[ + task_definition.bpmn_process_definition_id + ] cls._update_bpmn_definition_mappings( bpmn_definition_to_task_definitions_mappings, bpmn_subprocess_definition_bpmn_identifier, task_definition, ) - spiff_bpmn_process_dict["subprocess_specs"][ - bpmn_subprocess_definition_bpmn_identifier - ]["task_specs"][ + spiff_bpmn_process_dict["subprocess_specs"][bpmn_subprocess_definition_bpmn_identifier]["task_specs"][ task_definition.bpmn_identifier ] = task_definition.properties_json @classmethod - def _get_bpmn_process_dict( - cls, bpmn_process: BpmnProcessModel, get_tasks: bool = False - ) -> dict: - json_data = JsonDataModel.query.filter_by( - hash=bpmn_process.json_data_hash - ).first() + def _get_bpmn_process_dict(cls, bpmn_process: BpmnProcessModel, get_tasks: bool = False) -> dict: + json_data = JsonDataModel.query.filter_by(hash=bpmn_process.json_data_hash).first() bpmn_process_dict = {"data": json_data.data, "tasks": {}} bpmn_process_dict.update(bpmn_process.properties_json) if get_tasks: @@ -674,12 +615,8 @@ class ProcessInstanceProcessor: for task in tasks: tasks_dict = spiff_bpmn_process_dict["tasks"] if bpmn_subprocess_id_to_guid_mappings: - bpmn_subprocess_guid = bpmn_subprocess_id_to_guid_mappings[ - task.bpmn_process_id - ] - tasks_dict = spiff_bpmn_process_dict["subprocesses"][ - bpmn_subprocess_guid - ]["tasks"] + bpmn_subprocess_guid = bpmn_subprocess_id_to_guid_mappings[task.bpmn_process_id] + tasks_dict = spiff_bpmn_process_dict["subprocesses"][bpmn_subprocess_guid]["tasks"] tasks_dict[task.guid] = task.properties_json tasks_dict[task.guid]["data"] = json_data_mappings[task.json_data_hash] @@ -700,11 +637,9 @@ class ProcessInstanceProcessor: } bpmn_process_definition = process_instance_model.bpmn_process_definition if bpmn_process_definition is not None: - spiff_bpmn_process_dict["spec"] = ( - cls._get_definition_dict_for_bpmn_process_definition( - bpmn_process_definition, - bpmn_definition_to_task_definitions_mappings, - ) + 
spiff_bpmn_process_dict["spec"] = cls._get_definition_dict_for_bpmn_process_definition( + bpmn_process_definition, + bpmn_definition_to_task_definitions_mappings, ) cls._set_definition_dict_for_bpmn_subprocess_definitions( bpmn_process_definition, @@ -714,32 +649,20 @@ class ProcessInstanceProcessor: bpmn_process = process_instance_model.bpmn_process if bpmn_process is not None: - single_bpmn_process_dict = cls._get_bpmn_process_dict( - bpmn_process, get_tasks=True - ) + single_bpmn_process_dict = cls._get_bpmn_process_dict(bpmn_process, get_tasks=True) spiff_bpmn_process_dict.update(single_bpmn_process_dict) - bpmn_subprocesses = BpmnProcessModel.query.filter_by( - parent_process_id=bpmn_process.id - ).all() + bpmn_subprocesses = BpmnProcessModel.query.filter_by(parent_process_id=bpmn_process.id).all() bpmn_subprocess_id_to_guid_mappings = {} for bpmn_subprocess in bpmn_subprocesses: - bpmn_subprocess_id_to_guid_mappings[bpmn_subprocess.id] = ( - bpmn_subprocess.guid - ) - single_bpmn_process_dict = cls._get_bpmn_process_dict( - bpmn_subprocess - ) - spiff_bpmn_process_dict["subprocesses"][ - bpmn_subprocess.guid - ] = single_bpmn_process_dict + bpmn_subprocess_id_to_guid_mappings[bpmn_subprocess.id] = bpmn_subprocess.guid + single_bpmn_process_dict = cls._get_bpmn_process_dict(bpmn_subprocess) + spiff_bpmn_process_dict["subprocesses"][bpmn_subprocess.guid] = single_bpmn_process_dict tasks = TaskModel.query.filter( TaskModel.bpmn_process_id.in_(bpmn_subprocess_id_to_guid_mappings.keys()) # type: ignore ).all() - cls._get_tasks_dict( - tasks, spiff_bpmn_process_dict, bpmn_subprocess_id_to_guid_mappings - ) + cls._get_tasks_dict(tasks, spiff_bpmn_process_dict, bpmn_subprocess_id_to_guid_mappings) return spiff_bpmn_process_dict @@ -786,17 +709,11 @@ class ProcessInstanceProcessor: spiff_logger.setLevel(logging.WARNING) try: - full_bpmn_process_dict = ( - ProcessInstanceProcessor._get_full_bpmn_process_dict( - process_instance_model, - bpmn_definition_to_task_definitions_mappings, - ) - ) - bpmn_process_instance = ( - ProcessInstanceProcessor._serializer.workflow_from_dict( - full_bpmn_process_dict - ) + full_bpmn_process_dict = ProcessInstanceProcessor._get_full_bpmn_process_dict( + process_instance_model, + bpmn_definition_to_task_definitions_mappings, ) + bpmn_process_instance = ProcessInstanceProcessor._serializer.workflow_from_dict(full_bpmn_process_dict) except Exception as err: raise err finally: @@ -804,14 +721,10 @@ class ProcessInstanceProcessor: ProcessInstanceProcessor.set_script_engine(bpmn_process_instance) else: - bpmn_process_instance = ( - ProcessInstanceProcessor.get_bpmn_process_instance_from_workflow_spec( - spec, subprocesses - ) + bpmn_process_instance = ProcessInstanceProcessor.get_bpmn_process_instance_from_workflow_spec( + spec, subprocesses ) - bpmn_process_instance.data[ - ProcessInstanceProcessor.VALIDATION_PROCESS_KEY - ] = validate_only + bpmn_process_instance.data[ProcessInstanceProcessor.VALIDATION_PROCESS_KEY] = validate_only return ( bpmn_process_instance, full_bpmn_process_dict, @@ -820,22 +733,16 @@ class ProcessInstanceProcessor: def slam_in_data(self, data: dict) -> None: """Slam_in_data.""" - self.bpmn_process_instance.data = DeepMerge.merge( - self.bpmn_process_instance.data, data - ) + self.bpmn_process_instance.data = DeepMerge.merge(self.bpmn_process_instance.data, data) self.save() - def raise_if_no_potential_owners( - self, potential_owner_ids: list[int], message: str - ) -> None: + def raise_if_no_potential_owners(self, potential_owner_ids: 
list[int], message: str) -> None: """Raise_if_no_potential_owners.""" if not potential_owner_ids: raise NoPotentialOwnersForTaskError(message) - def get_potential_owner_ids_from_task( - self, task: SpiffTask - ) -> PotentialOwnerIdList: + def get_potential_owner_ids_from_task(self, task: SpiffTask) -> PotentialOwnerIdList: """Get_potential_owner_ids_from_task.""" task_spec = task.task_spec task_lane = "process_initiator" @@ -862,14 +769,8 @@ class ProcessInstanceProcessor: else: group_model = GroupModel.query.filter_by(identifier=task_lane).first() if group_model is None: - raise ( - NoPotentialOwnersForTaskError( - f"Could not find a group with name matching lane: {task_lane}" - ) - ) - potential_owner_ids = [ - i.user_id for i in group_model.user_group_assignments - ] + raise (NoPotentialOwnersForTaskError(f"Could not find a group with name matching lane: {task_lane}")) + potential_owner_ids = [i.user_id for i in group_model.user_group_assignments] lane_assignment_id = group_model.id self.raise_if_no_potential_owners( potential_owner_ids, @@ -961,14 +862,10 @@ class ProcessInstanceProcessor: for task_name, _task_spec in bpmn_definition_dict["spec"]["task_specs"].items(): processes[bpmn_definition_dict["spec"]["name"]].append(task_name) if "subprocess_specs" in bpmn_definition_dict: - for subprocess_name, subprocess_details in bpmn_definition_dict[ - "subprocess_specs" - ].items(): + for subprocess_name, subprocess_details in bpmn_definition_dict["subprocess_specs"].items(): processes[subprocess_name] = [] if "task_specs" in subprocess_details: - for task_name, _task_spec in subprocess_details[ - "task_specs" - ].items(): + for task_name, _task_spec in subprocess_details["task_specs"].items(): processes[subprocess_name].append(task_name) return processes @@ -987,11 +884,7 @@ class ProcessInstanceProcessor: for process_name, task_spec_names in processes.items(): if task_name in task_spec_names: - process_name_to_return = ( - self.find_process_model_process_name_by_task_name( - process_name, processes - ) - ) + process_name_to_return = self.find_process_model_process_name_by_task_name(process_name, processes) return process_name_to_return ################################################################# @@ -1007,9 +900,7 @@ class ProcessInstanceProcessor: bpmn_definition_dict = self.full_bpmn_process_dict spiff_task_json = bpmn_definition_dict["spec"]["task_specs"] or {} if "subprocess_specs" in bpmn_definition_dict: - for _subprocess_name, subprocess_details in bpmn_definition_dict[ - "subprocess_specs" - ].items(): + for _subprocess_name, subprocess_details in bpmn_definition_dict["subprocess_specs"].items(): if "task_specs" in subprocess_details: spiff_task_json = spiff_task_json | subprocess_details["task_specs"] return spiff_task_json @@ -1035,16 +926,12 @@ class ProcessInstanceProcessor: subprocesses_by_child_task_ids = {} task_typename_by_task_id = {} if "subprocesses" in process_instance_data_dict: - for subprocess_id, subprocess_details in process_instance_data_dict[ - "subprocesses" - ].items(): + for subprocess_id, subprocess_details in process_instance_data_dict["subprocesses"].items(): for task_id, task_details in subprocess_details["tasks"].items(): subprocesses_by_child_task_ids[task_id] = subprocess_id task_name = task_details["task_spec"] if task_name in spiff_task_json: - task_typename_by_task_id[task_id] = spiff_task_json[task_name][ - "typename" - ] + task_typename_by_task_id[task_id] = spiff_task_json[task_name]["typename"] return (subprocesses_by_child_task_ids, 
task_typename_by_task_id) def get_highest_level_calling_subprocesses_by_child_task_ids( @@ -1060,15 +947,10 @@ class ProcessInstanceProcessor: if current_subprocess_id_for_task in task_typename_by_task_id: # a call activity is like the top-level subprocess since it is the calling subprocess # according to spiff and the top-level calling subprocess is really what we care about - if ( - task_typename_by_task_id[current_subprocess_id_for_task] - == "CallActivity" - ): + if task_typename_by_task_id[current_subprocess_id_for_task] == "CallActivity": continue - subprocesses_by_child_task_ids[task_id] = ( - subprocesses_by_child_task_ids[subprocess_id] - ) + subprocesses_by_child_task_ids[task_id] = subprocesses_by_child_task_ids[subprocess_id] self.get_highest_level_calling_subprocesses_by_child_task_ids( subprocesses_by_child_task_ids, task_typename_by_task_id ) @@ -1081,12 +963,10 @@ class ProcessInstanceProcessor: store_bpmn_definition_mappings: bool = False, ) -> BpmnProcessDefinitionModel: process_bpmn_identifier = process_bpmn_properties["name"] - new_hash_digest = sha256( - json.dumps(process_bpmn_properties, sort_keys=True).encode("utf8") - ).hexdigest() - bpmn_process_definition: Optional[BpmnProcessDefinitionModel] = ( - BpmnProcessDefinitionModel.query.filter_by(hash=new_hash_digest).first() - ) + new_hash_digest = sha256(json.dumps(process_bpmn_properties, sort_keys=True).encode("utf8")).hexdigest() + bpmn_process_definition: Optional[BpmnProcessDefinitionModel] = BpmnProcessDefinitionModel.query.filter_by( + hash=new_hash_digest + ).first() if bpmn_process_definition is None: task_specs = process_bpmn_properties.pop("task_specs") @@ -1125,12 +1005,10 @@ class ProcessInstanceProcessor: ) if bpmn_process_definition_parent is not None: - bpmn_process_definition_relationship = ( - BpmnProcessDefinitionRelationshipModel.query.filter_by( - bpmn_process_definition_parent_id=bpmn_process_definition_parent.id, - bpmn_process_definition_child_id=bpmn_process_definition.id, - ).first() - ) + bpmn_process_definition_relationship = BpmnProcessDefinitionRelationshipModel.query.filter_by( + bpmn_process_definition_parent_id=bpmn_process_definition_parent.id, + bpmn_process_definition_child_id=bpmn_process_definition.id, + ).first() if bpmn_process_definition_relationship is None: bpmn_process_definition_relationship = BpmnProcessDefinitionRelationshipModel( bpmn_process_definition_parent_id=bpmn_process_definition_parent.id, @@ -1141,9 +1019,7 @@ class ProcessInstanceProcessor: def _add_bpmn_process_definitions(self, bpmn_spec_dict: dict) -> None: # store only if mappings is currently empty. this also would mean this is a new instance that has never saved before - store_bpmn_definition_mappings = ( - not self.bpmn_definition_to_task_definitions_mappings - ) + store_bpmn_definition_mappings = not self.bpmn_definition_to_task_definitions_mappings bpmn_process_definition_parent = self._store_bpmn_process_definition( bpmn_spec_dict["spec"], store_bpmn_definition_mappings=store_bpmn_definition_mappings, @@ -1154,9 +1030,7 @@ class ProcessInstanceProcessor: bpmn_process_definition_parent, store_bpmn_definition_mappings=store_bpmn_definition_mappings, ) - self.process_instance_model.bpmn_process_definition = ( - bpmn_process_definition_parent - ) + self.process_instance_model.bpmn_process_definition = bpmn_process_definition_parent def _add_bpmn_json_records(self) -> None: """Adds serialized_bpmn_definition and process_instance_data records to the db session. 
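[Editor's note: the reformatted _store_bpmn_process_definition above keys process definitions by a digest of their canonical JSON so identical specs are stored once. A minimal sketch of that lookup pattern, with a plain dict standing in for the BpmnProcessDefinitionModel table (the helper names here are hypothetical, not the backend's API):

import json
from hashlib import sha256

def hash_for_definition(properties: dict) -> str:
    # sort_keys=True canonicalizes the JSON, so equal dicts always produce the same digest
    return sha256(json.dumps(properties, sort_keys=True).encode("utf8")).hexdigest()

definition_table: dict[str, dict] = {}  # hypothetical stand-in for the definitions table

def store_definition(properties: dict) -> dict:
    digest = hash_for_definition(properties)
    if digest not in definition_table:  # insert only when no row with this hash exists yet
        definition_table[digest] = {"hash": digest, "properties_json": properties}
    return definition_table[digest]
]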
@@ -1178,14 +1052,12 @@ class ProcessInstanceProcessor: self._add_bpmn_process_definitions(bpmn_spec_dict) subprocesses = process_instance_data_dict.pop("subprocesses") - bpmn_process_parent, new_task_models, new_json_data_dicts = ( - TaskService.add_bpmn_process( - bpmn_process_dict=process_instance_data_dict, - process_instance=self.process_instance_model, - bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, - spiff_workflow=self.bpmn_process_instance, - serializer=self._serializer, - ) + bpmn_process_parent, new_task_models, new_json_data_dicts = TaskService.add_bpmn_process( + bpmn_process_dict=process_instance_data_dict, + process_instance=self.process_instance_model, + bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, + spiff_workflow=self.bpmn_process_instance, + serializer=self._serializer, ) for subprocess_task_id, subprocess_properties in subprocesses.items(): ( @@ -1216,13 +1088,10 @@ class ProcessInstanceProcessor: user_tasks = list(self.get_all_user_tasks()) self.process_instance_model.status = self.get_status().value current_app.logger.debug( - f"the_status: {self.process_instance_model.status} for instance" - f" {self.process_instance_model.id}" + f"the_status: {self.process_instance_model.status} for instance {self.process_instance_model.id}" ) self.process_instance_model.total_tasks = len(user_tasks) - self.process_instance_model.completed_tasks = sum( - 1 for t in user_tasks if t.state in complete_states - ) + self.process_instance_model.completed_tasks = sum(1 for t in user_tasks if t.state in complete_states) if self.process_instance_model.start_in_seconds is None: self.process_instance_model.start_in_seconds = round(time.time()) @@ -1252,9 +1121,7 @@ class ProcessInstanceProcessor: # filter out non-usertasks task_spec = ready_or_waiting_task.task_spec if not self.bpmn_process_instance._is_engine_task(task_spec): - potential_owner_hash = self.get_potential_owner_ids_from_task( - ready_or_waiting_task - ) + potential_owner_hash = self.get_potential_owner_ids_from_task(ready_or_waiting_task) extensions = task_spec.extensions # in the xml, it's the id attribute. this identifies the process where the activity lives. 
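[Editor's note: for context on potential_owner_hash above, get_potential_owner_ids_from_task (reformatted earlier in this file) maps a task's lane to user ids. A simplified sketch under assumed inputs, not the backend's actual models:

def potential_owner_ids_for_lane(lane: str, initiator_id: int, group_members: dict[str, list[int]]) -> list[int]:
    # the default "process_initiator" lane resolves to whoever started the instance
    if lane == "process_initiator":
        return [initiator_id]
    members = group_members.get(lane)
    if members is None:
        raise LookupError(f"Could not find a group with name matching lane: {lane}")
    return members

# e.g. potential_owner_ids_for_lane("finance", 1, {"finance": [7, 9]}) == [7, 9]
]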
@@ -1292,21 +1159,15 @@ class ProcessInstanceProcessor: ) db.session.add(human_task) - for potential_owner_id in potential_owner_hash[ - "potential_owner_ids" - ]: - human_task_user = HumanTaskUserModel( - user_id=potential_owner_id, human_task=human_task - ) + for potential_owner_id in potential_owner_hash["potential_owner_ids"]: + human_task_user = HumanTaskUserModel(user_id=potential_owner_id, human_task=human_task) db.session.add(human_task_user) self.increment_spiff_step() spiff_step_detail_mapping = self.spiff_step_details_mapping( spiff_task=ready_or_waiting_task, start_in_seconds=time.time() ) - spiff_step_detail = SpiffStepDetailsModel( - **spiff_step_detail_mapping - ) + spiff_step_detail = SpiffStepDetailsModel(**spiff_step_detail_mapping) db.session.add(spiff_step_detail) db.session.commit() # self.log_spiff_step_details(spiff_step_detail_mapping) @@ -1330,8 +1191,7 @@ class ProcessInstanceProcessor: if payload is not None: event_definition.payload = payload current_app.logger.info( - f"Event of type {event_definition.event_type} sent to process instance" - f" {self.process_instance_model.id}" + f"Event of type {event_definition.event_type} sent to process instance {self.process_instance_model.id}" ) try: self.bpmn_process_instance.catch(event_definition) @@ -1370,18 +1230,14 @@ class ProcessInstanceProcessor: spiff_task.complete() else: spiff_logger = logging.getLogger("spiff") - spiff_logger.info( - f"Skipped task {spiff_task.task_spec.name}", extra=spiff_task.log_info() - ) + spiff_logger.info(f"Skipped task {spiff_task.task_spec.name}", extra=spiff_task.log_info()) spiff_task._set_state(TaskState.COMPLETED) for child in spiff_task.children: child.task_spec._update(child) spiff_task.workflow.last_task = spiff_task if isinstance(spiff_task.task_spec, EndEvent): - for task in self.bpmn_process_instance.get_tasks( - TaskState.DEFINITE_MASK, workflow=spiff_task.workflow - ): + for task in self.bpmn_process_instance.get_tasks(TaskState.DEFINITE_MASK, workflow=spiff_task.workflow): task.complete() # A subworkflow task will become ready when its workflow is complete. 
Engine steps would normally @@ -1407,8 +1263,7 @@ class ProcessInstanceProcessor: step_detail = ( db.session.query(SpiffStepDetailsModel) .filter( - SpiffStepDetailsModel.process_instance_id - == self.process_instance_model.id, + SpiffStepDetailsModel.process_instance_id == self.process_instance_model.id, SpiffStepDetailsModel.spiff_step == spiff_step, ) .first() @@ -1454,15 +1309,11 @@ class ProcessInstanceProcessor: process_models = ProcessModelService.get_process_models(recursive=True) for process_model in process_models: try: - refs = SpecFileService.reference_map( - SpecFileService.get_references_for_process(process_model) - ) + refs = SpecFileService.reference_map(SpecFileService.get_references_for_process(process_model)) bpmn_process_identifiers = refs.keys() if bpmn_process_identifier in bpmn_process_identifiers: SpecFileService.update_process_cache(refs[bpmn_process_identifier]) - return FileSystemService.full_path_to_process_model_file( - process_model - ) + return FileSystemService.full_path_to_process_model_file(process_model) except Exception: current_app.logger.warning("Failed to parse process ", process_model.id) return None @@ -1474,19 +1325,14 @@ class ProcessInstanceProcessor: """Bpmn_file_full_path_from_bpmn_process_identifier.""" if bpmn_process_identifier is None: raise ValueError( - "bpmn_file_full_path_from_bpmn_process_identifier:" - " bpmn_process_identifier is unexpectedly None" + "bpmn_file_full_path_from_bpmn_process_identifier: bpmn_process_identifier is unexpectedly None" ) - spec_reference = SpecReferenceCache.query.filter_by( - identifier=bpmn_process_identifier, type="process" - ).first() + spec_reference = SpecReferenceCache.query.filter_by(identifier=bpmn_process_identifier, type="process").first() bpmn_file_full_path = None if spec_reference is None: - bpmn_file_full_path = ( - ProcessInstanceProcessor.backfill_missing_spec_reference_records( - bpmn_process_identifier - ) + bpmn_file_full_path = ProcessInstanceProcessor.backfill_missing_spec_reference_records( + bpmn_process_identifier ) else: bpmn_file_full_path = os.path.join( @@ -1497,10 +1343,7 @@ class ProcessInstanceProcessor: raise ( ApiError( error_code="could_not_find_bpmn_process_identifier", - message=( - "Could not find the the given bpmn process identifier from any" - " sources: %s" - ) + message="Could not find the given bpmn process identifier from any sources: %s" % bpmn_process_identifier, ) ) @@ -1532,9 +1375,7 @@ class ProcessInstanceProcessor: bpmn_process_identifier ) new_bpmn_files.add(new_bpmn_file_full_path) - dmn_file_glob = os.path.join( - os.path.dirname(new_bpmn_file_full_path), "*.dmn" - ) + dmn_file_glob = os.path.join(os.path.dirname(new_bpmn_file_full_path), "*.dmn") parser.add_dmn_files_by_glob(dmn_file_glob) processed_identifiers.add(bpmn_process_identifier) @@ -1565,36 +1406,24 @@ class ProcessInstanceProcessor: error_code="invalid_xml", message=f"'{file.name}' is not a valid xml file."
+ str(xse), ) from xse - if ( - process_model_info.primary_process_id is None - or process_model_info.primary_process_id == "" - ): + if process_model_info.primary_process_id is None or process_model_info.primary_process_id == "": raise ( ApiError( error_code="no_primary_bpmn_error", - message=( - "There is no primary BPMN process id defined for" - " process_model %s" - ) - % process_model_info.id, + message="There is no primary BPMN process id defined for process_model %s" % process_model_info.id, ) ) - ProcessInstanceProcessor.update_spiff_parser_with_all_process_dependency_files( - parser - ) + ProcessInstanceProcessor.update_spiff_parser_with_all_process_dependency_files(parser) try: bpmn_process_spec = parser.get_spec(process_model_info.primary_process_id) # returns a dict of {process_id: bpmn_process_spec}, otherwise known as an IdToBpmnProcessSpecMapping - subprocesses = parser.get_subprocess_specs( - process_model_info.primary_process_id - ) + subprocesses = parser.get_subprocess_specs(process_model_info.primary_process_id) except ValidationException as ve: raise ApiError( error_code="process_instance_validation_error", - message="Failed to parse the Workflow Specification. " - + "Error is '%s.'" % str(ve), + message="Failed to parse the Workflow Specification. " + "Error is '%s.'" % str(ve), file_name=ve.file_name, task_name=ve.name, task_id=ve.id, @@ -1655,9 +1484,7 @@ class ProcessInstanceProcessor: def queue_waiting_receive_messages(self) -> None: """Queue_waiting_receive_messages.""" waiting_events = self.bpmn_process_instance.waiting_events() - waiting_message_events = filter( - lambda e: e["event_type"] == "Message", waiting_events - ) + waiting_message_events = filter(lambda e: e["event_type"] == "Message", waiting_events) for event in waiting_message_events: # Ensure we are only creating one message instance for each waiting message @@ -1705,15 +1532,11 @@ class ProcessInstanceProcessor: ) -> None: # NOTE: To avoid saving spiff step details, just comment out this function and the step_delegate and # set the TaskModelSavingDelegate's secondary_engine_step_delegate to None. 
- def spiff_step_details_mapping_builder( - task: SpiffTask, start: float, end: float - ) -> dict: + def spiff_step_details_mapping_builder(task: SpiffTask, start: float, end: float) -> dict: self._script_engine.environment.revise_state_with_task_data(task) return self.spiff_step_details_mapping(task, start, end) - step_delegate = StepDetailLoggingDelegate( - self.increment_spiff_step, spiff_step_details_mapping_builder - ) + step_delegate = StepDetailLoggingDelegate(self.increment_spiff_step, spiff_step_details_mapping_builder) task_model_delegate = TaskModelSavingDelegate( secondary_engine_step_delegate=step_delegate, serializer=self._serializer, @@ -1722,13 +1545,9 @@ class ProcessInstanceProcessor: ) if execution_strategy_name is None: - execution_strategy_name = current_app.config[ - "SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_WEB" - ] + execution_strategy_name = current_app.config["SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_WEB"] - execution_strategy = execution_strategy_named( - execution_strategy_name, task_model_delegate - ) + execution_strategy = execution_strategy_named(execution_strategy_name, task_model_delegate) execution_service = WorkflowExecutionService( self.bpmn_process_instance, self.process_instance_model, @@ -1764,14 +1583,8 @@ class ProcessInstanceProcessor: raise ApiError.from_workflow_exception("task_error", str(we), we) from we @classmethod - def get_tasks_with_data( - cls, bpmn_process_instance: BpmnWorkflow - ) -> List[SpiffTask]: - return [ - task - for task in bpmn_process_instance.get_tasks(TaskState.FINISHED_MASK) - if len(task.data) > 0 - ] + def get_tasks_with_data(cls, bpmn_process_instance: BpmnWorkflow) -> List[SpiffTask]: + return [task for task in bpmn_process_instance.get_tasks(TaskState.FINISHED_MASK) if len(task.data) > 0] @classmethod def get_task_data_size(cls, bpmn_process_instance: BpmnWorkflow) -> int: @@ -1785,9 +1598,7 @@ class ProcessInstanceProcessor: @classmethod def get_python_env_size(cls, bpmn_process_instance: BpmnWorkflow) -> int: - user_defined_state = ( - bpmn_process_instance.script_engine.environment.user_defined_state() - ) + user_defined_state = bpmn_process_instance.script_engine.environment.user_defined_state() try: return len(json.dumps(user_defined_state)) @@ -1832,14 +1643,9 @@ class ProcessInstanceProcessor: endtasks = [] if self.bpmn_process_instance.is_completed(): - for task in SpiffTask.Iterator( - self.bpmn_process_instance.task_tree, TaskState.ANY_MASK - ): + for task in SpiffTask.Iterator(self.bpmn_process_instance.task_tree, TaskState.ANY_MASK): # Assure that we find the end event for this process_instance, and not for any sub-process_instances. 
- if ( - isinstance(task.task_spec, EndEvent) - and task.workflow == self.bpmn_process_instance - ): + if isinstance(task.task_spec, EndEvent) and task.workflow == self.bpmn_process_instance: endtasks.append(task) if len(endtasks) > 0: return endtasks[-1] @@ -1873,10 +1679,7 @@ class ProcessInstanceProcessor: if task._is_descendant_of(last_user_task): return task for task in ready_tasks: - if ( - self.bpmn_process_instance.last_task - and task.parent == last_user_task.parent - ): + if self.bpmn_process_instance.last_task and task.parent == last_user_task.parent: return task return ready_tasks[0] @@ -1884,9 +1687,7 @@ class ProcessInstanceProcessor: # If there are no ready tasks, but the thing isn't complete yet, find the first non-complete task # and return that next_task = None - for task in SpiffTask.Iterator( - self.bpmn_process_instance.task_tree, TaskState.NOT_FINISHED_MASK - ): + for task in SpiffTask.Iterator(self.bpmn_process_instance.task_tree, TaskState.NOT_FINISHED_MASK): next_task = task return next_task @@ -1896,9 +1697,7 @@ class ProcessInstanceProcessor: user_tasks.reverse() user_tasks = list( filter( - lambda task: not self.bpmn_process_instance._is_engine_task( - task.task_spec - ), + lambda task: not self.bpmn_process_instance._is_engine_task(task.task_spec), user_tasks, ) ) @@ -1907,24 +1706,19 @@ class ProcessInstanceProcessor: def get_task_dict_from_spiff_task(self, spiff_task: SpiffTask) -> dict[str, Any]: default_registry = DefaultRegistry() task_data = default_registry.convert(spiff_task.data) - python_env = default_registry.convert( - self._script_engine.environment.last_result() - ) + python_env = default_registry.convert(self._script_engine.environment.last_result()) task_json: Dict[str, Any] = { "task_data": task_data, "python_env": python_env, } return task_json - def complete_task( - self, spiff_task: SpiffTask, human_task: HumanTaskModel, user: UserModel - ) -> None: + def complete_task(self, spiff_task: SpiffTask, human_task: HumanTaskModel, user: UserModel) -> None: """Complete_task.""" task_model = TaskModel.query.filter_by(guid=human_task.task_id).first() if task_model is None: raise TaskNotFoundError( - "Cannot find a task with guid" - f" {self.process_instance_model.id} and task_id is {human_task.task_id}" + f"Cannot find a task with guid {human_task.task_id} for process instance {self.process_instance_model.id}" ) task_model.start_in_seconds = time.time() @@ -1958,16 +1752,10 @@ class ProcessInstanceProcessor: db.session.add(details_model) # ####### - json_data_dict_list = TaskService.update_task_model( - task_model, spiff_task, self._serializer - ) + json_data_dict_list = TaskService.update_task_model(task_model, spiff_task, self._serializer) for json_data_dict in json_data_dict_list: if json_data_dict is not None: - json_data = ( - db.session.query(JsonDataModel.id) - .filter_by(hash=json_data_dict["hash"]) - .first() - ) + json_data = db.session.query(JsonDataModel.id).filter_by(hash=json_data_dict["hash"]).first() if json_data is None: json_data = JsonDataModel(**json_data_dict) db.session.add(json_data) @@ -2021,11 +1809,7 @@ class ProcessInstanceProcessor: def get_all_user_tasks(self) -> List[SpiffTask]: """Get_all_user_tasks.""" all_tasks = self.bpmn_process_instance.get_tasks(TaskState.ANY_MASK) - return [ - t - for t in all_tasks - if not self.bpmn_process_instance._is_engine_task(t.task_spec) - ] + return [t for t in all_tasks if not self.bpmn_process_instance._is_engine_task(t.task_spec)] def get_all_completed_tasks(self) -> list[SpiffTask]:
"""Get_all_completed_tasks.""" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_queue_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_queue_service.py index a0aceb94..2d2bc4df 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_queue_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_queue_service.py @@ -31,9 +31,7 @@ class ProcessInstanceQueueService: queue_item = ProcessInstanceLockService.try_unlock(process_instance.id) if queue_item is None: - queue_item = ProcessInstanceQueueModel( - process_instance_id=process_instance.id - ) + queue_item = ProcessInstanceQueueModel(process_instance_id=process_instance.id) # TODO: configurable params (priority/run_at) queue_item.run_at_in_seconds = round(time.time()) @@ -73,8 +71,7 @@ class ProcessInstanceQueueService: if queue_entry is None: raise ProcessInstanceIsNotEnqueuedError( - f"{locked_by} cannot lock process instance {process_instance.id}. It" - " has not been enqueued." + f"{locked_by} cannot lock process instance {process_instance.id}. It has not been enqueued." ) if queue_entry.locked_by != locked_by: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py index e2d7ef19..62f7c993 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py @@ -76,13 +76,9 @@ class ProcessInstanceReportFilter: if self.has_terminal_status is not None: d["has_terminal_status"] = str(self.has_terminal_status).lower() if self.with_tasks_completed_by_me is not None: - d["with_tasks_completed_by_me"] = str( - self.with_tasks_completed_by_me - ).lower() + d["with_tasks_completed_by_me"] = str(self.with_tasks_completed_by_me).lower() if self.with_tasks_assigned_to_my_group is not None: - d["with_tasks_assigned_to_my_group"] = str( - self.with_tasks_assigned_to_my_group - ).lower() + d["with_tasks_assigned_to_my_group"] = str(self.with_tasks_assigned_to_my_group).lower() if self.with_relation_to_me is not None: d["with_relation_to_me"] = str(self.with_relation_to_me).lower() if self.process_initiator_username is not None: @@ -177,8 +173,7 @@ class ProcessInstanceReportService: report_metadata = cls.system_metadata_map(report_identifier) if report_metadata is None: raise ProcessInstanceReportNotFoundError( - f"Could not find a report with identifier '{report_identifier}' for" - f" user '{user.username}'" + f"Could not find a report with identifier '{report_identifier}' for user '{user.username}'" ) process_instance_report = ProcessInstanceReportModel( @@ -190,23 +185,15 @@ class ProcessInstanceReportService: return process_instance_report # type: ignore @classmethod - def filter_by_to_dict( - cls, process_instance_report: ProcessInstanceReportModel - ) -> dict[str, str]: + def filter_by_to_dict(cls, process_instance_report: ProcessInstanceReportModel) -> dict[str, str]: """Filter_by_to_dict.""" metadata = process_instance_report.report_metadata filter_by = metadata.get("filter_by", []) - filters = { - d["field_name"]: d["field_value"] - for d in filter_by - if "field_name" in d and "field_value" in d - } + filters = {d["field_name"]: d["field_value"] for d in filter_by if "field_name" in d and "field_value" in d} return filters 
@classmethod - def filter_from_metadata( - cls, process_instance_report: ProcessInstanceReportModel - ) -> ProcessInstanceReportFilter: + def filter_from_metadata(cls, process_instance_report: ProcessInstanceReportModel) -> ProcessInstanceReportFilter: """Filter_from_metadata.""" filters = cls.filter_by_to_dict(process_instance_report) @@ -308,9 +295,7 @@ class ProcessInstanceReportService: if report_filter_by_list is not None: report_filter.report_filter_by_list = report_filter_by_list if with_tasks_assigned_to_my_group is not None: - report_filter.with_tasks_assigned_to_my_group = ( - with_tasks_assigned_to_my_group - ) + report_filter.with_tasks_assigned_to_my_group = with_tasks_assigned_to_my_group if with_relation_to_me is not None: report_filter.with_relation_to_me = with_relation_to_me @@ -328,17 +313,13 @@ class ProcessInstanceReportService: process_instance_dict = process_instance["ProcessInstanceModel"].serialized for metadata_column in metadata_columns: if metadata_column["accessor"] not in process_instance_dict: - process_instance_dict[metadata_column["accessor"]] = ( - process_instance[metadata_column["accessor"]] - ) + process_instance_dict[metadata_column["accessor"]] = process_instance[metadata_column["accessor"]] results.append(process_instance_dict) return results @classmethod - def get_column_names_for_model( - cls, model: Type[SpiffworkflowBaseDBModel] - ) -> list[str]: + def get_column_names_for_model(cls, model: Type[SpiffworkflowBaseDBModel]) -> list[str]: """Get_column_names_for_model.""" return [i.name for i in model.__table__.columns] @@ -374,24 +355,17 @@ class ProcessInstanceReportService: """Run_process_instance_report.""" process_instance_query = ProcessInstanceModel.query # Always join that hot user table for good performance at serialization time. - process_instance_query = process_instance_query.options( - selectinload(ProcessInstanceModel.process_initiator) - ) + process_instance_query = process_instance_query.options(selectinload(ProcessInstanceModel.process_initiator)) if report_filter.process_model_identifier is not None: process_model = ProcessModelService.get_process_model( f"{report_filter.process_model_identifier}", ) - process_instance_query = process_instance_query.filter_by( - process_model_identifier=process_model.id - ) + process_instance_query = process_instance_query.filter_by(process_model_identifier=process_model.id) # this can never happen. obviously the class has the columns it defines. this is just to appease mypy. 
- if ( - ProcessInstanceModel.start_in_seconds is None - or ProcessInstanceModel.end_in_seconds is None - ): + if ProcessInstanceModel.start_in_seconds is None or ProcessInstanceModel.end_in_seconds is None: raise ( ApiError( error_code="unexpected_condition", @@ -422,9 +396,7 @@ class ProcessInstanceReportService: ) if report_filter.initiated_by_me is True: - process_instance_query = process_instance_query.filter_by( - process_initiator=user - ) + process_instance_query = process_instance_query.filter_by(process_initiator=user) if report_filter.has_terminal_status is True: process_instance_query = process_instance_query.filter( @@ -432,24 +404,18 @@ class ProcessInstanceReportService: ) if report_filter.process_initiator_username is not None: - user = UserModel.query.filter_by( - username=report_filter.process_initiator_username - ).first() + user = UserModel.query.filter_by(username=report_filter.process_initiator_username).first() process_initiator_id = -1 if user: process_initiator_id = user.id - process_instance_query = process_instance_query.filter_by( - process_initiator_id=process_initiator_id - ) + process_instance_query = process_instance_query.filter_by(process_initiator_id=process_initiator_id) if ( not report_filter.with_tasks_completed_by_me and not report_filter.with_tasks_assigned_to_my_group and report_filter.with_relation_to_me is True ): - process_instance_query = process_instance_query.outerjoin( - HumanTaskModel - ).outerjoin( + process_instance_query = process_instance_query.outerjoin(HumanTaskModel).outerjoin( HumanTaskUserModel, and_( HumanTaskModel.id == HumanTaskUserModel.human_task_id, @@ -476,37 +442,23 @@ class ProcessInstanceReportService: ) if report_filter.with_tasks_assigned_to_my_group is True: - group_model_join_conditions = [ - GroupModel.id == HumanTaskModel.lane_assignment_id - ] + group_model_join_conditions = [GroupModel.id == HumanTaskModel.lane_assignment_id] if report_filter.user_group_identifier: - group_model_join_conditions.append( - GroupModel.identifier == report_filter.user_group_identifier - ) + group_model_join_conditions.append(GroupModel.identifier == report_filter.user_group_identifier) process_instance_query = process_instance_query.join(HumanTaskModel) - process_instance_query = process_instance_query.join( - GroupModel, and_(*group_model_join_conditions) - ) + process_instance_query = process_instance_query.join(GroupModel, and_(*group_model_join_conditions)) process_instance_query = process_instance_query.join( UserGroupAssignmentModel, UserGroupAssignmentModel.group_id == GroupModel.id, ) - process_instance_query = process_instance_query.filter( - UserGroupAssignmentModel.user_id == user.id - ) + process_instance_query = process_instance_query.filter(UserGroupAssignmentModel.user_id == user.id) instance_metadata_aliases = {} - stock_columns = ProcessInstanceReportService.get_column_names_for_model( - ProcessInstanceModel - ) + stock_columns = ProcessInstanceReportService.get_column_names_for_model(ProcessInstanceModel) if report_filter.report_column_list: - process_instance_report.report_metadata["columns"] = ( - report_filter.report_column_list - ) + process_instance_report.report_metadata["columns"] = report_filter.report_column_list if report_filter.report_filter_by_list: - process_instance_report.report_metadata["filter_by"] = ( - report_filter.report_filter_by_list - ) + process_instance_report.report_metadata["filter_by"] = report_filter.report_filter_by_list for column in process_instance_report.report_metadata["columns"]: 
if column["accessor"] in stock_columns: @@ -531,14 +483,10 @@ class ProcessInstanceReportService: ] if filter_for_column: isouter = False - conditions.append( - instance_metadata_alias.value == filter_for_column["field_value"] - ) + conditions.append(instance_metadata_alias.value == filter_for_column["field_value"]) process_instance_query = process_instance_query.join( instance_metadata_alias, and_(*conditions), isouter=isouter - ).add_columns( - func.max(instance_metadata_alias.value).label(column["accessor"]) - ) + ).add_columns(func.max(instance_metadata_alias.value).label(column["accessor"])) order_by_query_array = [] order_by_array = process_instance_report.report_metadata["order_by"] @@ -548,22 +496,14 @@ class ProcessInstanceReportService: attribute = re.sub("^-", "", order_by_option) if attribute in stock_columns: if order_by_option.startswith("-"): - order_by_query_array.append( - getattr(ProcessInstanceModel, attribute).desc() - ) + order_by_query_array.append(getattr(ProcessInstanceModel, attribute).desc()) else: - order_by_query_array.append( - getattr(ProcessInstanceModel, attribute).asc() - ) + order_by_query_array.append(getattr(ProcessInstanceModel, attribute).asc()) elif attribute in instance_metadata_aliases: if order_by_option.startswith("-"): - order_by_query_array.append( - func.max(instance_metadata_aliases[attribute].value).desc() - ) + order_by_query_array.append(func.max(instance_metadata_aliases[attribute].value).desc()) else: - order_by_query_array.append( - func.max(instance_metadata_aliases[attribute].value).asc() - ) + order_by_query_array.append(func.max(instance_metadata_aliases[attribute].value).asc()) # return process_instance_query process_instances = ( process_instance_query.group_by(ProcessInstanceModel.id) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py index 23ce9a22..5e149965 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py @@ -84,9 +84,7 @@ class ProcessInstanceService: @staticmethod def do_waiting(status_value: str = ProcessInstanceStatus.waiting.value) -> None: """Do_waiting.""" - process_instance_ids_to_check = ProcessInstanceQueueService.peek_many( - status_value - ) + process_instance_ids_to_check = ProcessInstanceQueueService.peek_many(status_value) if len(process_instance_ids_to_check) == 0: return @@ -100,18 +98,14 @@ class ProcessInstanceService: locked = False processor = None try: - current_app.logger.info( - f"Processing process_instance {process_instance.id}" - ) + current_app.logger.info(f"Processing process_instance {process_instance.id}") processor = ProcessInstanceProcessor(process_instance) processor.lock_process_instance(process_instance_lock_prefix) locked = True execution_strategy_name = current_app.config[ "SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_BACKGROUND" ] - processor.do_engine_steps( - save=True, execution_strategy_name=execution_strategy_name - ) + processor.do_engine_steps(save=True, execution_strategy_name=execution_strategy_name) except ProcessInstanceIsAlreadyLockedError: continue except Exception as e: @@ -120,8 +114,7 @@ class ProcessInstanceService: db.session.add(process_instance) db.session.commit() error_message = ( - "Error running waiting task for process_instance" - f" {process_instance.id}" + f"Error running waiting task for 
process_instance {process_instance.id}" + f"({process_instance.process_model_identifier}). {str(e)}" ) current_app.logger.error(error_message) @@ -140,9 +133,7 @@ class ProcessInstanceService: # navigation = processor.bpmn_process_instance.get_deep_nav_list() # ProcessInstanceService.update_navigation(navigation, processor) process_model_service = ProcessModelService() - process_model = process_model_service.get_process_model( - processor.process_model_identifier - ) + process_model = process_model_service.get_process_model(processor.process_model_identifier) process_model.display_name if process_model else "" process_instance_api = ProcessInstanceApi( id=processor.get_process_instance_id(), @@ -155,34 +146,24 @@ class ProcessInstanceService: ) next_task_trying_again = next_task - if ( - not next_task - ): # The Next Task can be requested to be a certain task, useful for parallel tasks. + if not next_task: # The Next Task can be requested to be a certain task, useful for parallel tasks. # This may or may not work, sometimes there is no next task to complete. next_task_trying_again = processor.next_task() if next_task_trying_again is not None: - process_instance_api.next_task = ( - ProcessInstanceService.spiff_task_to_api_task( - processor, next_task_trying_again, add_docs_and_forms=True - ) + process_instance_api.next_task = ProcessInstanceService.spiff_task_to_api_task( + processor, next_task_trying_again, add_docs_and_forms=True ) return process_instance_api def get_process_instance(self, process_instance_id: int) -> Any: """Get_process_instance.""" - result = ( - db.session.query(ProcessInstanceModel) - .filter(ProcessInstanceModel.id == process_instance_id) - .first() - ) + result = db.session.query(ProcessInstanceModel).filter(ProcessInstanceModel.id == process_instance_id).first() return result @staticmethod - def get_users_assigned_to_task( - processor: ProcessInstanceProcessor, spiff_task: SpiffTask - ) -> List[int]: + def get_users_assigned_to_task(processor: ProcessInstanceProcessor, spiff_task: SpiffTask) -> List[int]: """Get_users_assigned_to_task.""" if processor.process_instance_model.process_initiator_id is None: raise ApiError.from_task( @@ -193,10 +174,7 @@ class ProcessInstanceService: # Workflow associated with a study - get all the users else: - if ( - not hasattr(spiff_task.task_spec, "lane") - or spiff_task.task_spec.lane is None - ): + if not hasattr(spiff_task.task_spec, "lane") or spiff_task.task_spec.lane is None: return [processor.process_instance_model.process_initiator_id] if spiff_task.task_spec.lane not in spiff_task.data: @@ -225,8 +203,7 @@ class ProcessInstanceService: else: raise ApiError.from_task( error_code="task_lane_user_error", - message="Spiff Task %s lane user is not a string or dict" - % spiff_task.task_spec.name, + message="Spiff Task %s lane user is not a string or dict" % spiff_task.task_spec.name, task=spiff_task, ) @@ -287,9 +264,7 @@ class ProcessInstanceService: models = [] for identifier, value, list_index in cls.possible_file_data_values(data): - model = cls.file_data_model_for_value( - identifier, value, process_instance_id - ) + model = cls.file_data_model_for_value(identifier, value, process_instance_id) if model is not None: model.list_index = list_index models.append(model) @@ -303,7 +278,9 @@ class ProcessInstanceService: models: List[ProcessInstanceFileDataModel], ) -> None: for model in models: - digest_reference = f"data:{model.mimetype};name={model.filename};base64,{cls.FILE_DATA_DIGEST_PREFIX}{model.digest}" + digest_reference 
= ( + f"data:{model.mimetype};name={model.filename};base64,{cls.FILE_DATA_DIGEST_PREFIX}{model.digest}" + ) if model.list_index is None: data[model.identifier] = digest_reference else: @@ -336,9 +313,7 @@ class ProcessInstanceService: Abstracted here because we need to do it multiple times when completing all tasks in a multi-instance task. """ - AuthorizationService.assert_user_can_complete_spiff_task( - processor.process_instance_model.id, spiff_task, user - ) + AuthorizationService.assert_user_can_complete_spiff_task(processor.process_instance_model.id, spiff_task, user) ProcessInstanceService.save_file_data_and_replace_with_digest_references( data, diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_model_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_model_service.py index d00ed011..4d9c852f 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_model_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_model_service.py @@ -62,9 +62,7 @@ class ProcessModelService(FileSystemService): process_group_path = os.path.abspath( os.path.join( FileSystemService.root_path(), - FileSystemService.id_string_to_relative_path( - process_group_identifier - ), + FileSystemService.id_string_to_relative_path(process_group_identifier), ) ) return cls.is_process_group(process_group_path) @@ -86,9 +84,7 @@ class ProcessModelService(FileSystemService): process_model_path = os.path.abspath( os.path.join( FileSystemService.root_path(), - FileSystemService.id_string_to_relative_path( - process_model_identifier - ), + FileSystemService.id_string_to_relative_path(process_model_identifier), ) ) return cls.is_process_model(process_model_path) @@ -96,9 +92,7 @@ class ProcessModelService(FileSystemService): return False @staticmethod - def write_json_file( - file_path: str, json_data: dict, indent: int = 4, sort_keys: bool = True - ) -> None: + def write_json_file(file_path: str, json_data: dict, indent: int = 4, sort_keys: bool = True) -> None: """Write json file.""" with open(file_path, "w") as h_open: json.dump(json_data, h_open, indent=indent, sort_keys=sort_keys) @@ -120,9 +114,7 @@ class ProcessModelService(FileSystemService): cls.save_process_model(process_model) @classmethod - def update_process_model( - cls, process_model: ProcessModelInfo, attributes_to_update: dict - ) -> None: + def update_process_model(cls, process_model: ProcessModelInfo, attributes_to_update: dict) -> None: """Update_spec.""" for atu_key, atu_value in attributes_to_update.items(): if hasattr(process_model, atu_key): @@ -133,14 +125,10 @@ class ProcessModelService(FileSystemService): def save_process_model(cls, process_model: ProcessModelInfo) -> None: """Save_process_model.""" process_model_path = os.path.abspath( - os.path.join( - FileSystemService.root_path(), process_model.id_for_file_path() - ) + os.path.join(FileSystemService.root_path(), process_model.id_for_file_path()) ) os.makedirs(process_model_path, exist_ok=True) - json_path = os.path.abspath( - os.path.join(process_model_path, cls.PROCESS_MODEL_JSON_FILE) - ) + json_path = os.path.abspath(os.path.join(process_model_path, cls.PROCESS_MODEL_JSON_FILE)) process_model_id = process_model.id # we don't save id in the json file # this allows us to move models around on the filesystem @@ -157,32 +145,25 @@ class ProcessModelService(FileSystemService): ).all() if len(instances) > 0: raise ProcessModelWithInstancesNotDeletableError( - f"We cannot delete the model 
`{process_model_id}`, there are" - " existing instances that depend on it." + f"We cannot delete the model `{process_model_id}`, there are existing instances that depend on it." ) process_model = self.get_process_model(process_model_id) path = self.workflow_path(process_model) shutil.rmtree(path) - def process_model_move( - self, original_process_model_id: str, new_location: str - ) -> ProcessModelInfo: + def process_model_move(self, original_process_model_id: str, new_location: str) -> ProcessModelInfo: """Process_model_move.""" process_model = self.get_process_model(original_process_model_id) original_model_path = self.workflow_path(process_model) _, model_id = os.path.split(original_model_path) new_relative_path = os.path.join(new_location, model_id) - new_model_path = os.path.abspath( - os.path.join(FileSystemService.root_path(), new_relative_path) - ) + new_model_path = os.path.abspath(os.path.join(FileSystemService.root_path(), new_relative_path)) shutil.move(original_model_path, new_model_path) new_process_model = self.get_process_model(new_relative_path) return new_process_model @classmethod - def get_process_model_from_relative_path( - cls, relative_path: str - ) -> ProcessModelInfo: + def get_process_model_from_relative_path(cls, relative_path: str) -> ProcessModelInfo: """Get_process_model_from_relative_path.""" path = os.path.join(FileSystemService.root_path(), relative_path) return cls.__scan_process_model(path) @@ -196,9 +177,7 @@ class ProcessModelService(FileSystemService): if not os.path.exists(FileSystemService.root_path()): raise ProcessEntityNotFoundError("process_model_root_not_found") - model_path = os.path.abspath( - os.path.join(FileSystemService.root_path(), process_model_id) - ) + model_path = os.path.abspath(os.path.join(FileSystemService.root_path(), process_model_id)) if cls.is_process_model(model_path): return cls.get_process_model_from_relative_path(process_model_id) raise ProcessEntityNotFoundError("process_model_not_found") @@ -222,12 +201,8 @@ class ProcessModelService(FileSystemService): process_model_glob = os.path.join(root_path, "**", "process_model.json") for file in glob(process_model_glob, recursive=True): - process_model_relative_path = os.path.relpath( - file, start=FileSystemService.root_path() - ) - process_model = cls.get_process_model_from_relative_path( - os.path.dirname(process_model_relative_path) - ) + process_model_relative_path = os.path.relpath(file, start=FileSystemService.root_path()) + process_model = cls.get_process_model_from_relative_path(os.path.dirname(process_model_relative_path)) process_models.append(process_model) process_models.sort() @@ -235,11 +210,7 @@ class ProcessModelService(FileSystemService): user = UserService.current_user() new_process_model_list = [] for process_model in process_models: - modified_process_model_id = ( - ProcessModelInfo.modify_process_identifier_for_path_param( - process_model.id - ) - ) + modified_process_model_id = ProcessModelInfo.modify_process_identifier_for_path_param(process_model.id) uri = f"/v1.0/process-instances/{modified_process_model_id}" has_permission = AuthorizationService.user_has_permission( user=user, permission="create", target_uri=uri @@ -269,32 +240,24 @@ class ProcessModelService(FileSystemService): if parent_group: if full_group_id_path not in process_group_cache: process_group_cache[full_group_id_path] = parent_group - parent_group_array.append( - {"id": parent_group.id, "display_name": parent_group.display_name} - ) + parent_group_array.append({"id": parent_group.id, 
"display_name": parent_group.display_name}) return {"cache": process_group_cache, "process_groups": parent_group_array} @classmethod def get_parent_group_array(cls, process_identifier: str) -> list[ProcessGroupLite]: """Get_parent_group_array.""" - parent_group_lites_with_cache = cls.get_parent_group_array_and_cache_it( - process_identifier, {} - ) + parent_group_lites_with_cache = cls.get_parent_group_array_and_cache_it(process_identifier, {}) return parent_group_lites_with_cache["process_groups"] @classmethod - def get_process_groups( - cls, process_group_id: Optional[str] = None - ) -> list[ProcessGroup]: + def get_process_groups(cls, process_group_id: Optional[str] = None) -> list[ProcessGroup]: """Returns the process_groups.""" process_groups = cls.__scan_process_groups(process_group_id) process_groups.sort() return process_groups @classmethod - def get_process_group( - cls, process_group_id: str, find_direct_nested_items: bool = True - ) -> ProcessGroup: + def get_process_group(cls, process_group_id: str, find_direct_nested_items: bool = True) -> ProcessGroup: """Look for a given process_group, and return it.""" if os.path.exists(FileSystemService.root_path()): process_group_path = os.path.abspath( @@ -309,9 +272,7 @@ class ProcessModelService(FileSystemService): find_direct_nested_items=find_direct_nested_items, ) - raise ProcessEntityNotFoundError( - "process_group_not_found", f"Process Group Id: {process_group_id}" - ) + raise ProcessEntityNotFoundError("process_group_not_found", f"Process Group Id: {process_group_id}") @classmethod def add_process_group(cls, process_group: ProcessGroup) -> ProcessGroup: @@ -331,16 +292,12 @@ class ProcessModelService(FileSystemService): cls.write_json_file(json_path, serialized_process_group) return process_group - def process_group_move( - self, original_process_group_id: str, new_location: str - ) -> ProcessGroup: + def process_group_move(self, original_process_group_id: str, new_location: str) -> ProcessGroup: """Process_group_move.""" original_group_path = self.process_group_path(original_process_group_id) _, original_group_id = os.path.split(original_group_path) new_root = os.path.join(FileSystemService.root_path(), new_location) - new_group_path = os.path.abspath( - os.path.join(FileSystemService.root_path(), new_root, original_group_id) - ) + new_group_path = os.path.abspath(os.path.join(FileSystemService.root_path(), new_root, original_group_id)) destination = shutil.move(original_group_path, new_group_path) new_process_group = self.get_process_group(destination) return new_process_group @@ -388,9 +345,7 @@ class ProcessModelService(FileSystemService): return process_groups @classmethod - def __scan_process_groups( - cls, process_group_id: Optional[str] = None - ) -> list[ProcessGroup]: + def __scan_process_groups(cls, process_group_id: Optional[str] = None) -> list[ProcessGroup]: """__scan_process_groups.""" if not os.path.exists(FileSystemService.root_path()): return [] # Nothing to scan yet. There are no files. 
@@ -409,9 +364,7 @@ class ProcessModelService(FileSystemService): return process_groups @classmethod - def find_or_create_process_group( - cls, dir_path: str, find_direct_nested_items: bool = True - ) -> ProcessGroup: + def find_or_create_process_group(cls, dir_path: str, find_direct_nested_items: bool = True) -> ProcessGroup: """Reads the process_group.json file, and any nested directories.""" cat_path = os.path.join(dir_path, cls.PROCESS_GROUP_JSON_FILE) if os.path.exists(cat_path): @@ -424,15 +377,10 @@ class ProcessModelService(FileSystemService): if process_group is None: raise ApiError( error_code="process_group_could_not_be_loaded_from_disk", - message=( - "We could not load the process_group from disk from:" - f" {dir_path}" - ), + message=f"We could not load the process_group from disk from: {dir_path}", ) else: - process_group_id = cls.path_to_id( - dir_path.replace(FileSystemService.root_path(), "") - ) + process_group_id = cls.path_to_id(dir_path.replace(FileSystemService.root_path(), "")) process_group = ProcessGroup( id="", display_name=process_group_id, @@ -452,9 +400,7 @@ class ProcessModelService(FileSystemService): # TODO: check whether this is a group or model if cls.is_process_group(nested_item.path): # This is a nested group - process_group.process_groups.append( - cls.find_or_create_process_group(nested_item.path) - ) + process_group.process_groups.append(cls.find_or_create_process_group(nested_item.path)) elif ProcessModelService.is_process_model(nested_item.path): process_group.process_models.append( cls.__scan_process_model( @@ -490,19 +436,13 @@ class ProcessModelService(FileSystemService): if process_model_info is None: raise ApiError( error_code="process_model_could_not_be_loaded_from_disk", - message=( - "We could not load the process_model from disk with data:" - f" {data}" - ), + message=f"We could not load the process_model from disk with data: {data}", ) else: if name is None: raise ApiError( error_code="missing_name_of_process_model", - message=( - "Missing name of process model. Path not found:" - f" {json_file_path}" - ), + message=f"Missing name of process model. 
Path not found: {json_file_path}", ) process_model_info = ProcessModelInfo( @@ -511,9 +451,7 @@ class ProcessModelService(FileSystemService): description="", display_order=0, ) - cls.write_json_file( - json_file_path, cls.PROCESS_MODEL_SCHEMA.dump(process_model_info) - ) + cls.write_json_file(json_file_path, cls.PROCESS_MODEL_SCHEMA.dump(process_model_info)) # we don't store `id` in the json files, so we add it in here process_model_info.id = name return process_model_info diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/script_unit_test_runner.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/script_unit_test_runner.py index 310f53e9..63c9fe38 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/script_unit_test_runner.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/script_unit_test_runner.py @@ -81,9 +81,7 @@ class ScriptUnitTestRunner: context = cls._script_engine.environment.last_result() result_as_boolean = context == expected_output_context - script_unit_test_result = ScriptUnitTestResult( - result=result_as_boolean, context=context - ) + script_unit_test_result = ScriptUnitTestResult(result=result_as_boolean, context=context) return script_unit_test_result @classmethod @@ -95,9 +93,7 @@ class ScriptUnitTestRunner: """Run_test.""" # this is totally made up, but hopefully resembles what spiffworkflow ultimately does unit_tests = task.task_spec.extensions["unitTests"] - unit_test = [ - unit_test for unit_test in unit_tests if unit_test["id"] == test_identifier - ][0] + unit_test = [unit_test for unit_test in unit_tests if unit_test["id"] == test_identifier][0] input_context = None expected_output_context = None @@ -114,13 +110,8 @@ class ScriptUnitTestRunner: except json.decoder.JSONDecodeError as ex: return ScriptUnitTestResult( result=False, - error=( - "Failed to parse expectedOutputJson:" - f" {unit_test['expectedOutputJson']}: {str(ex)}" - ), + error=f"Failed to parse expectedOutputJson: {unit_test['expectedOutputJson']}: {str(ex)}", ) script = task.task_spec.script - return cls.run_with_script_and_pre_post_contexts( - script, input_context, expected_output_context - ) + return cls.run_with_script_and_pre_post_contexts(script, input_context, expected_output_context) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/secret_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/secret_service.py index ca6a7e84..5ed0d0d3 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/secret_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/secret_service.py @@ -16,10 +16,7 @@ class SecretService: @classmethod def _encrypt(cls, value: str) -> str: encrypted_bytes: bytes = b"" - if ( - current_app.config.get("SPIFFWORKFLOW_BACKEND_ENCRYPTION_LIB") - == "cryptography" - ): + if current_app.config.get("SPIFFWORKFLOW_BACKEND_ENCRYPTION_LIB") == "cryptography": # cryptography needs a bytes object value_as_bytes = str.encode(value) encrypted_bytes = current_app.config["CIPHER"].encrypt(value_as_bytes) @@ -98,9 +95,7 @@ class SecretService: else: raise ApiError( error_code="update_secret_error", - message=( - f"Cannot update secret with key: {key}. Resource does not exist." - ), + message=f"Cannot update secret with key: {key}. Resource does not exist.", status_code=404, ) @@ -115,16 +110,11 @@ class SecretService: except Exception as e: raise ApiError( error_code="delete_secret_error", - message=( - f"Could not delete secret with key: {key}. 
Original error" - f" is: {e}" - ), + message=f"Could not delete secret with key: {key}. Original error is: {e}", ) from e else: raise ApiError( error_code="delete_secret_error", - message=( - f"Cannot delete secret with key: {key}. Resource does not exist." - ), + message=f"Cannot delete secret with key: {key}. Resource does not exist.", status_code=404, ) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/service_task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/service_task_service.py index 77a5fb21..8500ecb5 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/service_task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/service_task_service.py @@ -49,37 +49,19 @@ class ServiceTaskDelegate: """Given a code like 404, return a string like: The requested resource was not found.""" msg = f"HTTP Status Code {code}." if code == 301: - msg = ( - "301 (Permanent Redirect) - you may need to use a different URL in this" - " service task." - ) + msg = "301 (Permanent Redirect) - you may need to use a different URL in this service task." if code == 302: - msg = ( - "302 (Temporary Redirect) - you may need to use a different URL in this" - " service task." - ) + msg = "302 (Temporary Redirect) - you may need to use a different URL in this service task." if code == 400: - msg = ( - "400 (Bad Request) - The request was received by the service, but it" - " was not understood." - ) + msg = "400 (Bad Request) - The request was received by the service, but it was not understood." if code == 401: - msg = ( - "401 (Unauthorized Error) - this end point requires some form of" - " authentication." - ) + msg = "401 (Unauthorized Error) - this end point requires some form of authentication." if code == 403: - msg = ( - "403 (Forbidden) - The service you called refused to accept the" - " request." - ) + msg = "403 (Forbidden) - The service you called refused to accept the request." if code == 404: msg = "404 (Not Found) - The service did not find the requested resource." if code == 500: - msg = ( - "500 (Internal Server Error) - The service you called is experiencing" - " technical difficulties." - ) + msg = "500 (Internal Server Error) - The service you called is experiencing technical difficulties." 
if code == 501: msg = ( "501 (Not Implemented) - This service needs to be called with the" @@ -94,10 +76,7 @@ class ServiceTaskDelegate: current_app.logger.info(f"Calling connector proxy using connector: {name}") with sentry_sdk.start_span(op="connector_by_name", description=name): with sentry_sdk.start_span(op="call-connector", description=call_url): - params = { - k: ServiceTaskDelegate.check_prefixes(v["value"]) - for k, v in bpmn_params.items() - } + params = {k: ServiceTaskDelegate.check_prefixes(v["value"]) for k, v in bpmn_params.items()} params["spiff__task_data"] = task_data proxied_response = requests.post(call_url, json=params) @@ -113,20 +92,12 @@ class ServiceTaskDelegate: parsed_response = {} if proxied_response.status_code >= 300: - message = ServiceTaskDelegate.get_message_for_status( - proxied_response.status_code - ) - error = ( - f"Received an unexpected response from service {name} :" - f" {message}" - ) + message = ServiceTaskDelegate.get_message_for_status(proxied_response.status_code) + error = f"Received an unexpected response from service {name} : {message}" if "error" in parsed_response: error += parsed_response["error"] if json_parse_error: - error += ( - "A critical component (The connector proxy) is not" - " responding correctly." - ) + error += "A critical component (The connector proxy) is not responding correctly." raise ConnectorProxyError(error) elif json_parse_error: raise ConnectorProxyError( diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/spec_file_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/spec_file_service.py index 4a36fe11..b3f9549d 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/spec_file_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/spec_file_service.py @@ -48,14 +48,10 @@ class SpecFileService(FileSystemService): extension_filter: str = "", ) -> List[File]: """Return all files associated with a workflow specification.""" - path = os.path.join( - FileSystemService.root_path(), process_model_info.id_for_file_path() - ) + path = os.path.join(FileSystemService.root_path(), process_model_info.id_for_file_path()) files = SpecFileService._get_files(path, file_name) if extension_filter != "": - files = list( - filter(lambda file: file.name.endswith(extension_filter), files) - ) + files = list(filter(lambda file: file.name.endswith(extension_filter), files)) return files @staticmethod @@ -74,23 +70,17 @@ class SpecFileService(FileSystemService): files = SpecFileService.get_files(process_model_info) references = [] for file in files: - references.extend( - SpecFileService.get_references_for_file(file, process_model_info) - ) + references.extend(SpecFileService.get_references_for_file(file, process_model_info)) return references @classmethod - def get_references_for_file( - cls, file: File, process_model_info: ProcessModelInfo - ) -> list[SpecReference]: + def get_references_for_file(cls, file: File, process_model_info: ProcessModelInfo) -> list[SpecReference]: """Get_references_for_file.""" full_file_path = SpecFileService.full_file_path(process_model_info, file.name) file_contents: bytes = b"" with open(full_file_path) as f: file_contents = f.read().encode() - return cls.get_references_for_file_contents( - process_model_info, file.name, file_contents - ) + return cls.get_references_for_file_contents(process_model_info, file.name, file_contents) @classmethod def get_etree_from_xml_bytes(cls, binary_data: bytes) -> etree.Element: @@ -139,9 +129,7 @@ class 
SpecFileService(FileSystemService): has_lanes = sub_parser.has_lanes() is_executable = sub_parser.process_executable start_messages = sub_parser.start_messages() - is_primary = ( - sub_parser.get_id() == process_model_info.primary_process_id - ) + is_primary = sub_parser.get_id() == process_model_info.primary_process_id references.append( SpecReference( @@ -162,9 +150,7 @@ class SpecFileService(FileSystemService): return references @staticmethod - def add_file( - process_model_info: ProcessModelInfo, file_name: str, binary_data: bytes - ) -> File: + def add_file(process_model_info: ProcessModelInfo, file_name: str, binary_data: bytes) -> File: """Add_file.""" # Same as update return SpecFileService.update_file(process_model_info, file_name, binary_data) @@ -177,28 +163,20 @@ class SpecFileService(FileSystemService): BpmnValidator() parser = MyCustomParser() try: - parser.add_bpmn_xml( - cls.get_etree_from_xml_bytes(binary_data), filename=file_name - ) + parser.add_bpmn_xml(cls.get_etree_from_xml_bytes(binary_data), filename=file_name) except Exception as exception: raise ProcessModelFileInvalidError( f"Received error trying to parse bpmn xml: {str(exception)}" ) from exception @classmethod - def update_file( - cls, process_model_info: ProcessModelInfo, file_name: str, binary_data: bytes - ) -> File: + def update_file(cls, process_model_info: ProcessModelInfo, file_name: str, binary_data: bytes) -> File: """Update_file.""" SpecFileService.assert_valid_file_name(file_name) cls.validate_bpmn_xml(file_name, binary_data) - references = cls.get_references_for_file_contents( - process_model_info, file_name, binary_data - ) - primary_process_ref = next( - (ref for ref in references if ref.is_primary and ref.is_executable), None - ) + references = cls.get_references_for_file_contents(process_model_info, file_name, binary_data) + primary_process_ref = next((ref for ref in references if ref.is_primary and ref.is_executable), None) SpecFileService.clear_caches_for_file(file_name, process_model_info) for ref in references: @@ -233,8 +211,7 @@ class SpecFileService(FileSystemService): full_file_path = SpecFileService.full_file_path(process_model_info, file_name) if not os.path.exists(full_file_path): raise ProcessModelFileNotFoundError( - f"No file found with name {file_name} in" - f" {process_model_info.display_name}" + f"No file found with name {file_name} in {process_model_info.display_name}" ) with open(full_file_path, "rb") as f_handle: spec_file_data = f_handle.read() @@ -243,9 +220,7 @@ class SpecFileService(FileSystemService): @staticmethod def full_file_path(spec: ProcessModelInfo, file_name: str) -> str: """File_path.""" - return os.path.abspath( - os.path.join(SpecFileService.workflow_path(spec), file_name) - ) + return os.path.abspath(os.path.join(SpecFileService.workflow_path(spec), file_name)) @staticmethod def last_modified(spec: ProcessModelInfo, file_name: str) -> datetime: @@ -288,13 +263,11 @@ class SpecFileService(FileSystemService): SpecFileService.update_correlation_cache(ref) @staticmethod - def clear_caches_for_file( - file_name: str, process_model_info: ProcessModelInfo - ) -> None: + def clear_caches_for_file(file_name: str, process_model_info: ProcessModelInfo) -> None: """Clear all caches related to a file.""" - db.session.query(SpecReferenceCache).filter( - SpecReferenceCache.file_name == file_name - ).filter(SpecReferenceCache.process_model_id == process_model_info.id).delete() + db.session.query(SpecReferenceCache).filter(SpecReferenceCache.file_name == file_name).filter( 
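            # (aside, not in the patch: the two chained .filter() calls scope the bulk
            #  delete to exactly one file within one process model before .delete()
            #  removes those SpecReferenceCache rows)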
+ SpecReferenceCache.process_model_id == process_model_info.id + ).delete() # fixme: likely the other caches should be cleared as well, but we don't have a clean way to do so yet. @staticmethod @@ -307,9 +280,7 @@ class SpecFileService(FileSystemService): def update_process_cache(ref: SpecReference) -> None: """Update_process_cache.""" process_id_lookup = ( - SpecReferenceCache.query.filter_by(identifier=ref.identifier) - .filter_by(type=ref.type) - .first() + SpecReferenceCache.query.filter_by(identifier=ref.identifier).filter_by(type=ref.type).first() ) if process_id_lookup is None: process_id_lookup = SpecReferenceCache.from_spec_reference(ref) @@ -317,9 +288,7 @@ class SpecFileService(FileSystemService): db.session.commit() else: if ref.relative_path != process_id_lookup.relative_path: - full_bpmn_file_path = SpecFileService.full_path_from_relative_path( - process_id_lookup.relative_path - ) + full_bpmn_file_path = SpecFileService.full_path_from_relative_path(process_id_lookup.relative_path) # if the old relative bpmn file no longer exists, then assume things were moved around # on the file system. Otherwise, assume it is a duplicate process id and error. if os.path.isfile(full_bpmn_file_path): @@ -351,11 +320,9 @@ class SpecFileService(FileSystemService): def update_message_trigger_cache(ref: SpecReference) -> None: """Assure we know which messages can trigger the start of a process.""" for message_name in ref.start_messages: - message_triggerable_process_model = ( - MessageTriggerableProcessModel.query.filter_by( - message_name=message_name, - ).first() - ) + message_triggerable_process_model = MessageTriggerableProcessModel.query.filter_by( + message_name=message_name, + ).first() if message_triggerable_process_model is None: message_triggerable_process_model = MessageTriggerableProcessModel( message_name=message_name, @@ -364,22 +331,16 @@ class SpecFileService(FileSystemService): db.session.add(message_triggerable_process_model) db.session.commit() else: - if ( - message_triggerable_process_model.process_model_identifier - != ref.process_model_id - ): + if message_triggerable_process_model.process_model_identifier != ref.process_model_id: raise ProcessModelFileInvalidError( - "Message model is already used to start process model" - f" {ref.process_model_id}" + f"Message model is already used to start process model {ref.process_model_id}" ) @staticmethod def update_correlation_cache(ref: SpecReference) -> None: """Update_correlation_cache.""" for name in ref.correlations.keys(): - correlation_property_retrieval_expressions = ref.correlations[name][ - "retrieval_expressions" - ] + correlation_property_retrieval_expressions = ref.correlations[name]["retrieval_expressions"] for cpre in correlation_property_retrieval_expressions: message_name = ref.messages.get(cpre["messageRef"], None) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index e6ae791e..ad70175f 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -37,14 +37,10 @@ class TaskService: on_duplicate_key_stmt = None if current_app.config["SPIFFWORKFLOW_BACKEND_DATABASE_TYPE"] == "mysql": insert_stmt = mysql_insert(JsonDataModel).values(list_of_dicts) - on_duplicate_key_stmt = insert_stmt.on_duplicate_key_update( - data=insert_stmt.inserted.data - ) + on_duplicate_key_stmt = 
insert_stmt.on_duplicate_key_update(data=insert_stmt.inserted.data) else: insert_stmt = postgres_insert(JsonDataModel).values(list_of_dicts) - on_duplicate_key_stmt = insert_stmt.on_conflict_do_nothing( - index_elements=["hash"] - ) + on_duplicate_key_stmt = insert_stmt.on_conflict_do_nothing(index_elements=["hash"]) db.session.execute(on_duplicate_key_stmt) @classmethod @@ -61,17 +57,11 @@ class TaskService: """ new_properties_json = serializer.task_to_dict(spiff_task) spiff_task_data = new_properties_json.pop("data") - python_env_data_dict = cls._get_python_env_data_dict_from_spiff_task( - spiff_task, serializer - ) + python_env_data_dict = cls._get_python_env_data_dict_from_spiff_task(spiff_task, serializer) task_model.properties_json = new_properties_json task_model.state = TaskStateNames[new_properties_json["state"]] - json_data_dict = cls._update_task_data_on_task_model( - task_model, spiff_task_data, "json_data_hash" - ) - python_env_dict = cls._update_task_data_on_task_model( - task_model, python_env_data_dict, "python_env_data_hash" - ) + json_data_dict = cls._update_task_data_on_task_model(task_model, spiff_task_data, "json_data_hash") + python_env_dict = cls._update_task_data_on_task_model(task_model, python_env_data_dict, "python_env_data_hash") return [json_data_dict, python_env_dict] @classmethod @@ -81,16 +71,9 @@ class TaskService: process_instance: ProcessInstanceModel, serializer: BpmnWorkflowSerializer, bpmn_definition_to_task_definitions_mappings: dict, - ) -> Tuple[ - Optional[BpmnProcessModel], - TaskModel, - dict[str, TaskModel], - dict[str, JsonDataDict], - ]: + ) -> Tuple[Optional[BpmnProcessModel], TaskModel, dict[str, TaskModel], dict[str, JsonDataDict]]: spiff_task_guid = str(spiff_task.id) - task_model: Optional[TaskModel] = TaskModel.query.filter_by( - guid=spiff_task_guid - ).first() + task_model: Optional[TaskModel] = TaskModel.query.filter_by(guid=spiff_task_guid).first() bpmn_process = None new_task_models: dict[str, TaskModel] = {} new_json_data_dicts: dict[str, JsonDataDict] = {} @@ -103,9 +86,9 @@ class TaskService: ) task_model = TaskModel.query.filter_by(guid=spiff_task_guid).first() if task_model is None: - task_definition = bpmn_definition_to_task_definitions_mappings[ - spiff_task.workflow.spec.name - ][spiff_task.task_spec.name] + task_definition = bpmn_definition_to_task_definitions_mappings[spiff_task.workflow.spec.name][ + spiff_task.task_spec.name + ] task_model = TaskModel( guid=spiff_task_guid, bpmn_process_id=bpmn_process.id, @@ -115,9 +98,7 @@ class TaskService: return (bpmn_process, task_model, new_task_models, new_json_data_dicts) @classmethod - def task_subprocess( - cls, spiff_task: SpiffTask - ) -> Tuple[Optional[str], Optional[BpmnWorkflow]]: + def task_subprocess(cls, spiff_task: SpiffTask) -> Tuple[Optional[str], Optional[BpmnWorkflow]]: top_level_workflow = spiff_task.workflow._get_outermost_workflow() my_wf = spiff_task.workflow # This is the workflow the spiff_task is part of my_sp = None @@ -149,31 +130,25 @@ class TaskService: # check for bpmn_process_id because mypy doesn't realize bpmn_process can be None if process_instance.bpmn_process_id is None: spiff_workflow = spiff_task.workflow._get_outermost_workflow() - bpmn_process, new_task_models, new_json_data_dicts = ( - cls.add_bpmn_process( - bpmn_process_dict=serializer.workflow_to_dict(spiff_workflow), - process_instance=process_instance, - bpmn_definition_to_task_definitions_mappings=bpmn_definition_to_task_definitions_mappings, - spiff_workflow=spiff_workflow, - 
serializer=serializer, - ) + bpmn_process, new_task_models, new_json_data_dicts = cls.add_bpmn_process( + bpmn_process_dict=serializer.workflow_to_dict(spiff_workflow), + process_instance=process_instance, + bpmn_definition_to_task_definitions_mappings=bpmn_definition_to_task_definitions_mappings, + spiff_workflow=spiff_workflow, + serializer=serializer, ) else: - bpmn_process = BpmnProcessModel.query.filter_by( - guid=subprocess_guid - ).first() + bpmn_process = BpmnProcessModel.query.filter_by(guid=subprocess_guid).first() if bpmn_process is None: spiff_workflow = spiff_task.workflow - bpmn_process, new_task_models, new_json_data_dicts = ( - cls.add_bpmn_process( - bpmn_process_dict=serializer.workflow_to_dict(subprocess), - process_instance=process_instance, - bpmn_process_parent=process_instance.bpmn_process, - bpmn_process_guid=subprocess_guid, - bpmn_definition_to_task_definitions_mappings=bpmn_definition_to_task_definitions_mappings, - spiff_workflow=spiff_workflow, - serializer=serializer, - ) + bpmn_process, new_task_models, new_json_data_dicts = cls.add_bpmn_process( + bpmn_process_dict=serializer.workflow_to_dict(subprocess), + process_instance=process_instance, + bpmn_process_parent=process_instance.bpmn_process, + bpmn_process_guid=subprocess_guid, + bpmn_definition_to_task_definitions_mappings=bpmn_definition_to_task_definitions_mappings, + spiff_workflow=spiff_workflow, + serializer=serializer, ) return (bpmn_process, new_task_models, new_json_data_dicts) @@ -221,9 +196,7 @@ class TaskService: bpmn_process.properties_json = bpmn_process_dict bpmn_process_data_json = json.dumps(bpmn_process_data_dict, sort_keys=True) - bpmn_process_data_hash = sha256( - bpmn_process_data_json.encode("utf8") - ).hexdigest() + bpmn_process_data_hash = sha256(bpmn_process_data_json.encode("utf8")).hexdigest() if bpmn_process.json_data_hash != bpmn_process_data_hash: new_json_data_dicts[bpmn_process_data_hash] = { "hash": bpmn_process_data_hash, @@ -272,9 +245,7 @@ class TaskService: if json_data_dict is not None: new_json_data_dicts[json_data_dict["hash"]] = json_data_dict - python_env_data_dict = cls._get_python_env_data_dict_from_spiff_task( - spiff_task, serializer - ) + python_env_data_dict = cls._get_python_env_data_dict_from_spiff_task(spiff_task, serializer) python_env_dict = TaskService._update_task_data_on_task_model( task_model, python_env_data_dict, "python_env_data_hash" ) @@ -303,9 +274,9 @@ class TaskService: spiff_task: SpiffTask, bpmn_definition_to_task_definitions_mappings: dict, ) -> TaskModel: - task_definition = bpmn_definition_to_task_definitions_mappings[ - spiff_task.workflow.spec.name - ][spiff_task.task_spec.name] + task_definition = bpmn_definition_to_task_definitions_mappings[spiff_task.workflow.spec.name][ + spiff_task.task_spec.name + ] task_model = TaskModel( guid=str(spiff_task.id), bpmn_process_id=bpmn_process.id, @@ -318,9 +289,7 @@ class TaskService: def _get_python_env_data_dict_from_spiff_task( cls, spiff_task: SpiffTask, serializer: BpmnWorkflowSerializer ) -> dict: - user_defined_state = ( - spiff_task.workflow.script_engine.environment.user_defined_state() - ) + user_defined_state = spiff_task.workflow.script_engine.environment.user_defined_state() # this helps to convert items like datetime objects to be json serializable converted_data: dict = serializer.data_converter.convert(user_defined_state) return converted_data diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/user_service.py 
b/spiffworkflow-backend/src/spiffworkflow_backend/services/user_service.py index 3735b97e..2ddc861a 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/user_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/user_service.py @@ -35,9 +35,7 @@ class UserService: ) -> UserModel: """Create_user.""" user_model: Optional[UserModel] = ( - UserModel.query.filter(UserModel.service == service) - .filter(UserModel.service_id == service_id) - .first() + UserModel.query.filter(UserModel.service == service).filter(UserModel.service_id == service_id).first() ) if user_model is None: if username == "": @@ -89,19 +87,13 @@ class UserService: def current_user() -> Any: """Current_user.""" if not UserService.has_user(): - raise ApiError( - "logged_out", "You are no longer logged in.", status_code=401 - ) + raise ApiError("logged_out", "You are no longer logged in.", status_code=401) return g.user @staticmethod def get_principal_by_user_id(user_id: int) -> PrincipalModel: """Get_principal_by_user_id.""" - principal = ( - db.session.query(PrincipalModel) - .filter(PrincipalModel.user_id == user_id) - .first() - ) + principal = db.session.query(PrincipalModel).filter(PrincipalModel.user_id == user_id).first() if isinstance(principal, PrincipalModel): return principal raise ApiError( @@ -110,14 +102,10 @@ class UserService: ) @classmethod - def create_principal( - cls, child_id: int, id_column_name: str = "user_id" - ) -> PrincipalModel: + def create_principal(cls, child_id: int, id_column_name: str = "user_id") -> PrincipalModel: """Create_principal.""" column = PrincipalModel.__table__.columns[id_column_name] - principal: Optional[PrincipalModel] = PrincipalModel.query.filter( - column == child_id - ).first() + principal: Optional[PrincipalModel] = PrincipalModel.query.filter(column == child_id).first() if principal is None: principal = PrincipalModel() setattr(principal, id_column_name, child_id) @@ -136,12 +124,7 @@ class UserService: @classmethod def add_user_to_group(cls, user: UserModel, group: GroupModel) -> None: """Add_user_to_group.""" - exists = ( - UserGroupAssignmentModel() - .query.filter_by(user_id=user.id) - .filter_by(group_id=group.id) - .count() - ) + exists = UserGroupAssignmentModel().query.filter_by(user_id=user.id).filter_by(group_id=group.id).count() if not exists: ugam = UserGroupAssignmentModel(user_id=user.id, group_id=group.id) db.session.add(ugam) @@ -151,15 +134,10 @@ class UserService: def add_waiting_group_assignment(cls, username: str, group: GroupModel) -> None: """Add_waiting_group_assignment.""" wugam = ( - UserGroupAssignmentWaitingModel() - .query.filter_by(username=username) - .filter_by(group_id=group.id) - .first() + UserGroupAssignmentWaitingModel().query.filter_by(username=username).filter_by(group_id=group.id).first() ) if not wugam: - wugam = UserGroupAssignmentWaitingModel( - username=username, group_id=group.id - ) + wugam = UserGroupAssignmentWaitingModel(username=username, group_id=group.id) db.session.add(wugam) db.session.commit() if wugam.is_match_all(): @@ -179,10 +157,7 @@ class UserService: db.session.delete(assignment) wildcard = ( UserGroupAssignmentWaitingModel() - .query.filter( - UserGroupAssignmentWaitingModel.username - == UserGroupAssignmentWaitingModel.MATCH_ALL_USERS - ) + .query.filter(UserGroupAssignmentWaitingModel.username == UserGroupAssignmentWaitingModel.MATCH_ALL_USERS) .all() ) for assignment in wildcard: @@ -190,14 +165,10 @@ class UserService: db.session.commit() @staticmethod - def 
get_user_by_service_and_service_id( - service: str, service_id: str - ) -> Optional[UserModel]: + def get_user_by_service_and_service_id(service: str, service_id: str) -> Optional[UserModel]: """Get_user_by_service_and_service_id.""" user: UserModel = ( - UserModel.query.filter(UserModel.service == service) - .filter(UserModel.service_id == service_id) - .first() + UserModel.query.filter(UserModel.service == service).filter(UserModel.service_id == service_id).first() ) if user: return user @@ -211,8 +182,6 @@ class UserService: HumanTaskModel.lane_assignment_id.in_(group_ids) # type: ignore ).all() for human_task in human_tasks: - human_task_user = HumanTaskUserModel( - user_id=user.id, human_task_id=human_task.id - ) + human_task_user = HumanTaskUserModel(user_id=user.id, human_task_id=human_task.id) db.session.add(human_task_user) db.session.commit() diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py index 63a54bae..cbcd60da 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py @@ -62,9 +62,7 @@ class TaskModelSavingDelegate(EngineStepDelegate): ) -> None: self.secondary_engine_step_delegate = secondary_engine_step_delegate self.process_instance = process_instance - self.bpmn_definition_to_task_definitions_mappings = ( - bpmn_definition_to_task_definitions_mappings - ) + self.bpmn_definition_to_task_definitions_mappings = bpmn_definition_to_task_definitions_mappings self.current_task_model: Optional[TaskModel] = None self.task_models: dict[str, TaskModel] = {} @@ -78,9 +76,7 @@ class TaskModelSavingDelegate(EngineStepDelegate): """ return self.process_instance.bpmn_process_id is not None - def _update_json_data_dicts_using_list( - self, json_data_dict_list: list[Optional[JsonDataDict]] - ) -> None: + def _update_json_data_dicts_using_list(self, json_data_dict_list: list[Optional[JsonDataDict]]) -> None: for json_data_dict in json_data_dict_list: if json_data_dict is not None: self.json_data_dicts[json_data_dict["hash"]] = json_data_dict @@ -105,9 +101,7 @@ class TaskModelSavingDelegate(EngineStepDelegate): def did_complete_task(self, spiff_task: SpiffTask) -> None: if self.current_task_model and self.should_update_task_model(): self.current_task_model.end_in_seconds = time.time() - json_data_dict_list = TaskService.update_task_model( - self.current_task_model, spiff_task, self.serializer - ) + json_data_dict_list = TaskService.update_task_model(self.current_task_model, spiff_task, self.serializer) self._update_json_data_dicts_using_list(json_data_dict_list) self.task_models[self.current_task_model.guid] = self.current_task_model if self.secondary_engine_step_delegate: @@ -126,11 +120,7 @@ class TaskModelSavingDelegate(EngineStepDelegate): if self.should_update_task_model(): # excludes FUTURE and COMPLETED. the others were required to get PP1 to go to completion. 
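            # (Illustrative aside, not part of the patch: SpiffWorkflow's TaskState
            #  values are disjoint bit flags, so OR-ing them below builds one mask that
            #  a single get_tasks() call can match, conceptually:
            #      mask = TaskState.WAITING | TaskState.READY
            #      matching = [t for t in all_tasks if t.state & mask]
            #  FUTURE and COMPLETED are excluded simply by leaving them out of the mask.)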
for waiting_spiff_task in bpmn_process_instance.get_tasks( - TaskState.WAITING - | TaskState.CANCELLED - | TaskState.READY - | TaskState.MAYBE - | TaskState.LIKELY + TaskState.WAITING | TaskState.CANCELLED | TaskState.READY | TaskState.MAYBE | TaskState.LIKELY ): bpmn_process, task_model, new_task_models, new_json_data_dicts = ( TaskService.find_or_create_task_model_from_spiff_task( @@ -142,9 +132,7 @@ class TaskModelSavingDelegate(EngineStepDelegate): ) self.task_models.update(new_task_models) self.json_data_dicts.update(new_json_data_dicts) - json_data_dict_list = TaskService.update_task_model( - task_model, waiting_spiff_task, self.serializer - ) + json_data_dict_list = TaskService.update_task_model(task_model, waiting_spiff_task, self.serializer) self.task_models[task_model.guid] = task_model self._update_json_data_dicts_using_list(json_data_dict_list) @@ -180,9 +168,8 @@ class StepDetailLoggingDelegate(EngineStepDelegate): } def should_log(self, spiff_task: SpiffTask) -> bool: - return ( - spiff_task.task_spec.spec_type in self.tasks_to_log - and not spiff_task.task_spec.name.endswith(".EndJoin") + return spiff_task.task_spec.spec_type in self.tasks_to_log and not spiff_task.task_spec.name.endswith( + ".EndJoin" ) def will_complete_task(self, spiff_task: SpiffTask) -> None: @@ -193,9 +180,7 @@ class StepDetailLoggingDelegate(EngineStepDelegate): def did_complete_task(self, spiff_task: SpiffTask) -> None: if self.should_log(spiff_task): self.step_details.append( - self.spiff_step_details_mapping( - spiff_task, self.current_task_start_in_seconds, time.time() - ) + self.spiff_step_details_mapping(spiff_task, self.current_task_start_in_seconds, time.time()) ) def save(self, commit: bool = True) -> None: @@ -211,9 +196,7 @@ class ExecutionStrategy: """__init__.""" self.delegate = delegate - def do_engine_steps( - self, bpmn_process_instance: BpmnWorkflow, exit_at: None = None - ) -> None: + def do_engine_steps(self, bpmn_process_instance: BpmnWorkflow, exit_at: None = None) -> None: pass def save(self) -> None: @@ -223,9 +206,7 @@ class ExecutionStrategy: class GreedyExecutionStrategy(ExecutionStrategy): """The common execution strategy. This will greedily run all engine steps without stopping.""" - def do_engine_steps( - self, bpmn_process_instance: BpmnWorkflow, exit_at: None = None - ) -> None: + def do_engine_steps(self, bpmn_process_instance: BpmnWorkflow, exit_at: None = None) -> None: bpmn_process_instance.do_engine_steps( exit_at=exit_at, will_complete_task=self.delegate.will_complete_task, @@ -241,9 +222,7 @@ class RunUntilServiceTaskExecutionStrategy(ExecutionStrategy): return (to an interstitial page). The background processor would then take over. 
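        Rough shape of the idea, with illustrative names rather than the real
        SpiffWorkflow API:

            while engine_steps_remaining():
                if next_step_is_service_task():
                    break  # hand off to the background processor
                run_one_engine_step()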
""" - def do_engine_steps( - self, bpmn_process_instance: BpmnWorkflow, exit_at: None = None - ) -> None: + def do_engine_steps(self, bpmn_process_instance: BpmnWorkflow, exit_at: None = None) -> None: engine_steps = list( [ t @@ -270,9 +249,7 @@ class RunUntilServiceTaskExecutionStrategy(ExecutionStrategy): self.delegate.after_engine_steps(bpmn_process_instance) -def execution_strategy_named( - name: str, delegate: EngineStepDelegate -) -> ExecutionStrategy: +def execution_strategy_named(name: str, delegate: EngineStepDelegate) -> ExecutionStrategy: cls = { "greedy": GreedyExecutionStrategy, "run_until_service_task": RunUntilServiceTaskExecutionStrategy, @@ -305,9 +282,7 @@ class WorkflowExecutionService: def do_engine_steps(self, exit_at: None = None, save: bool = False) -> None: """Do_engine_steps.""" - with safe_assertion( - ProcessInstanceLockService.has_lock(self.process_instance_model.id) - ) as tripped: + with safe_assertion(ProcessInstanceLockService.has_lock(self.process_instance_model.id)) as tripped: if tripped: raise AssertionError( "The current thread has not obtained a lock for this process" @@ -364,9 +339,7 @@ class WorkflowExecutionService: def queue_waiting_receive_messages(self) -> None: """Queue_waiting_receive_messages.""" waiting_events = self.bpmn_process_instance.waiting_events() - waiting_message_events = filter( - lambda e: e["event_type"] == "Message", waiting_events - ) + waiting_message_events = filter(lambda e: e["event_type"] == "Message", waiting_events) for event in waiting_message_events: # Ensure we are only creating one message instance for each waiting message diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/base_test.py b/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/base_test.py index 704d7379..5f483fdd 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/base_test.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/base_test.py @@ -54,9 +54,7 @@ class BaseTest: ) @staticmethod - def logged_in_headers( - user: UserModel, _redirect_url: str = "http://some/frontend/url" - ) -> Dict[str, str]: + def logged_in_headers(user: UserModel, _redirect_url: str = "http://some/frontend/url") -> Dict[str, str]: """Logged_in_headers.""" return dict(Authorization="Bearer " + user.encode_auth_token()) @@ -80,9 +78,7 @@ class BaseTest: if bpmn_file_location is None: bpmn_file_location = process_model_id - self.create_process_group( - client, user, process_group_description, process_group_display_name - ) + self.create_process_group(client, user, process_group_description, process_group_display_name) self.create_process_model_with_api( client, @@ -108,9 +104,7 @@ class BaseTest: display_name: str = "", ) -> str: """Create_process_group.""" - process_group = ProcessGroup( - id=process_group_id, display_name=display_name, display_order=0, admin=False - ) + process_group = ProcessGroup(id=process_group_id, display_name=display_name, display_order=0, admin=False) response = client.post( "/v1.0/process-groups", headers=self.logged_in_headers(user), @@ -139,9 +133,7 @@ class BaseTest: # make sure we have a group process_group_id, _ = os.path.split(process_model_id) modified_process_group_id = process_group_id.replace("/", ":") - process_group_path = os.path.abspath( - os.path.join(FileSystemService.root_path(), process_group_id) - ) + process_group_path = os.path.abspath(os.path.join(FileSystemService.root_path(), process_group_id)) if ProcessModelService.is_process_group(process_group_path): if 
exception_notification_addresses is None: exception_notification_addresses = [] @@ -171,14 +163,9 @@ class BaseTest: else: raise Exception("You must create the group first") else: - raise Exception( - "You must include the process_model_id, which must be a path to the" - " model" - ) + raise Exception("You must include the process_model_id, which must be a path to the model") - def get_test_data_file_full_path( - self, file_name: str, process_model_test_data_dir: str - ) -> str: + def get_test_data_file_full_path(self, file_name: str, process_model_test_data_dir: str) -> str: """Get_test_data_file_contents.""" return os.path.join( current_app.instance_path, @@ -190,13 +177,9 @@ class BaseTest: file_name, ) - def get_test_data_file_contents( - self, file_name: str, process_model_test_data_dir: str - ) -> bytes: + def get_test_data_file_contents(self, file_name: str, process_model_test_data_dir: str) -> bytes: """Get_test_data_file_contents.""" - file_full_path = self.get_test_data_file_full_path( - file_name, process_model_test_data_dir - ) + file_full_path = self.get_test_data_file_full_path(file_name, process_model_test_data_dir) with open(file_full_path, "rb") as file: return file.read() @@ -325,9 +308,7 @@ class BaseTest: ) -> UserModel: """Create_user_with_permission.""" user = BaseTest.find_or_create_user(username=username) - return cls.add_permissions_to_user( - user, target_uri=target_uri, permission_names=permission_names - ) + return cls.add_permissions_to_user(user, target_uri=target_uri, permission_names=permission_names) @classmethod def add_permissions_to_user( @@ -337,9 +318,7 @@ class BaseTest: permission_names: Optional[list[str]] = None, ) -> UserModel: """Add_permissions_to_user.""" - permission_target = AuthorizationService.find_or_create_permission_target( - target_uri - ) + permission_target = AuthorizationService.find_or_create_permission_target(target_uri) if permission_names is None: permission_names = [member.name for member in Permission] @@ -371,8 +350,6 @@ class BaseTest: """Modify_process_identifier_for_path_param.""" return ProcessModelInfo.modify_process_identifier_for_path_param(identifier) - def un_modify_modified_process_identifier_for_path_param( - self, modified_identifier: str - ) -> str: + def un_modify_modified_process_identifier_for_path_param(self, modified_identifier: str) -> str: """Un_modify_modified_process_model_id.""" return modified_identifier.replace(":", "/") diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/example_data.py b/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/example_data.py index 4b0ee5fc..f0960185 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/example_data.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/example_data.py @@ -77,13 +77,9 @@ class ExampleDataLoader: try: file = open(file_path, "rb") data = file.read() - file_info = SpecFileService.add_file( - process_model_info=spec, file_name=filename, binary_data=data - ) + file_info = SpecFileService.add_file(process_model_info=spec, file_name=filename, binary_data=data) if is_primary: - references = SpecFileService.get_references_for_file( - file_info, spec - ) + references = SpecFileService.get_references_for_file(file_info, spec) spec.primary_process_id = references[0].identifier spec.primary_file_name = filename ProcessModelService.save_process_model(spec) diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_for_good_errors.py 
b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_for_good_errors.py index eea6af3c..90cbac87 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_for_good_errors.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_for_good_errors.py @@ -22,9 +22,7 @@ class TestForGoodErrors(BaseTest): ) -> Any: """Returns the next available user task for a given process instance, if possible.""" human_tasks = ( - db.session.query(HumanTaskModel) - .filter(HumanTaskModel.process_instance_id == process_instance_id) - .all() + db.session.query(HumanTaskModel).filter(HumanTaskModel.process_instance_id == process_instance_id).all() ) assert len(human_tasks) > 0, "No human tasks found for process." human_task = human_tasks[0] @@ -59,9 +57,7 @@ class TestForGoodErrors(BaseTest): headers=self.logged_in_headers(with_super_admin_user), ) assert response.status_code == 200 - response = self.get_next_user_task( - process_instance_id, client, with_super_admin_user - ) + response = self.get_next_user_task(process_instance_id, client, with_super_admin_user) assert response.json is not None assert response.json["error_type"] == "TemplateSyntaxError" assert response.json["line_number"] == 3 @@ -88,9 +84,7 @@ class TestForGoodErrors(BaseTest): f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model.id)}/{process_instance.id}/run", headers=self.logged_in_headers(with_super_admin_user), ) - response = self.get_next_user_task( - process_instance.id, client, with_super_admin_user - ) + response = self.get_next_user_task(process_instance.id, client, with_super_admin_user) assert response.status_code == 400 assert response.json is not None diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py index f4aeddeb..17cf79cf 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py @@ -19,9 +19,7 @@ class TestLoggingService(BaseTest): """Test_process_instance_run.""" process_group_id = "test_logging_spiff_logger" process_model_id = "simple_script" - self.create_process_group( - client=client, user=with_super_admin_user, process_group_id=process_group_id - ) + self.create_process_group(client=client, user=with_super_admin_user, process_group_id=process_group_id) process_model_identifier = f"{process_group_id}/{process_model_id}" # create the model self.create_process_model_with_api( @@ -33,9 +31,7 @@ class TestLoggingService(BaseTest): ) bpmn_file_name = "simple_script.bpmn" - bpmn_file_data_bytes = self.get_test_data_file_contents( - bpmn_file_name, "simple_script" - ) + bpmn_file_data_bytes = self.get_test_data_file_contents(bpmn_file_name, "simple_script") # add bpmn to the model self.create_spec_file( client=client, diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_nested_groups.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_nested_groups.py index 90b5af88..c1926617 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_nested_groups.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_nested_groups.py @@ -49,9 +49,7 @@ class TestNestedGroups(BaseTest): 
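        # (aside, not part of the patch: modify_process_identifier_for_path_param swaps
        #  "/" for ":" so a nested id like "test_group/model_1" travels as the single
        #  URL path segment "test_group:model_1"; un_modify_modified_process_identifier_for_path_param
        #  reverses it with .replace(":", "/"), which is why these tests build ids both ways)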
f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/run", headers=self.logged_in_headers(with_super_admin_user), ) - process_instance = ProcessInstanceService().get_process_instance( - process_instance_id - ) + process_instance = ProcessInstanceService().get_process_instance(process_instance_id) assert process_instance modified_process_group_id = process_group_id.replace("/", ":") response = client.delete( @@ -61,10 +59,7 @@ class TestNestedGroups(BaseTest): assert response.status_code == 400 assert response.json["error_code"] == "existing_instances" assert "We cannot delete the group" in response.json["message"] - assert ( - "there are models with existing instances inside the group" - in response.json["message"] - ) + assert "there are models with existing instances inside the group" in response.json["message"] def test_delete_group_with_running_instance_in_nested_group( self, @@ -110,9 +105,7 @@ class TestNestedGroups(BaseTest): f"/v1.0/process-instances/{process_instance_id}/run", headers=self.logged_in_headers(with_super_admin_user), ) - process_instance = ProcessInstanceService().get_process_instance( - process_instance_id - ) + process_instance = ProcessInstanceService().get_process_instance(process_instance_id) assert process_instance modified_process_group_id = process_group_id.replace("/", ":") response = client.delete( @@ -122,10 +115,7 @@ class TestNestedGroups(BaseTest): assert response.status_code == 400 assert response.json["error_code"] == "existing_instances" assert "We cannot delete the group" in response.json["message"] - assert ( - "there are models with existing instances inside the group" - in response.json["message"] - ) + assert "there are models with existing instances inside the group" in response.json["message"] def test_nested_groups( self, @@ -137,12 +127,8 @@ class TestNestedGroups(BaseTest): # /process-groups/{process_group_path}/show target_uri = "/v1.0/process-groups/group_a,group_b" user = self.find_or_create_user() - self.add_permissions_to_user( - user, target_uri=target_uri, permission_names=["read"] - ) - response = client.get( # noqa: F841 - target_uri, headers=self.logged_in_headers(user) - ) + self.add_permissions_to_user(user, target_uri=target_uri, permission_names=["read"]) + response = client.get(target_uri, headers=self.logged_in_headers(user)) # noqa: F841 def test_add_nested_group( self, @@ -268,11 +254,7 @@ class TestNestedGroups(BaseTest): target_uri = "/v1.0/process-groups/group_a" user = self.find_or_create_user() - self.add_permissions_to_user( - user, target_uri=target_uri, permission_names=["read"] - ) - response = client.get( # noqa: F841 - target_uri, headers=self.logged_in_headers(user) - ) + self.add_permissions_to_user(user, target_uri=target_uri, permission_names=["read"]) + response = client.get(target_uri, headers=self.logged_in_headers(user)) # noqa: F841 print("test_process_group_show: ") diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_openid_blueprint.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_openid_blueprint.py index ce1655cb..4033c81f 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_openid_blueprint.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_openid_blueprint.py @@ -24,9 +24,7 @@ class TestFlaskOpenId(BaseTest): response = client.get("/openid/.well-known/openid-configuration") discovered_urls = response.json assert 
"http://localhost/openid" == discovered_urls["issuer"] - assert ( - "http://localhost/openid/auth" == discovered_urls["authorization_endpoint"] - ) + assert "http://localhost/openid/auth" == discovered_urls["authorization_endpoint"] assert "http://localhost/openid/token" == discovered_urls["token_endpoint"] def test_get_login_page( @@ -70,8 +68,6 @@ class TestFlaskOpenId(BaseTest): assert "id_token" in response.json assert "refresh_token" in response.json - decoded_token = jwt.decode( - response.json["id_token"], options={"verify_signature": False} - ) + decoded_token = jwt.decode(response.json["id_token"], options={"verify_signature": False}) assert "iss" in decoded_token assert "email" in decoded_token diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index f450729c..a88570ee 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -61,9 +61,7 @@ class TestProcessApi(BaseTest): ) assert response.status_code == 403 - self.add_permissions_to_user( - user, target_uri="/v1.0/process-groups", permission_names=["read"] - ) + self.add_permissions_to_user(user, target_uri="/v1.0/process-groups", permission_names=["read"]) response = client.get( "/v1.0/process-groups", headers=self.logged_in_headers(user), @@ -84,9 +82,7 @@ class TestProcessApi(BaseTest): ) -> None: """Test_permissions_check.""" user = self.find_or_create_user() - self.add_permissions_to_user( - user, target_uri="/v1.0/process-groups", permission_names=["read"] - ) + self.add_permissions_to_user(user, target_uri="/v1.0/process-groups", permission_names=["read"]) request_body = { "requests_to_check": { "/v1.0/process-groups": ["GET", "POST"], @@ -120,9 +116,7 @@ class TestProcessApi(BaseTest): process_group_id = "test_process_group" process_group_display_name = "Test Process Group" # creates the group directory, and the json file - self.create_process_group( - client, with_super_admin_user, process_group_id, process_group_display_name - ) + self.create_process_group(client, with_super_admin_user, process_group_id, process_group_display_name) process_model_id = "sample" model_display_name = "Sample" @@ -146,9 +140,7 @@ class TestProcessApi(BaseTest): # add bpmn file to the model bpmn_file_name = "sample.bpmn" - bpmn_file_data_bytes = self.get_test_data_file_contents( - bpmn_file_name, "sample" - ) + bpmn_file_data_bytes = self.get_test_data_file_contents(bpmn_file_name, "sample") self.create_spec_file( client, process_model_id=process_model.id, @@ -175,14 +167,10 @@ class TestProcessApi(BaseTest): process_group_description = "Test Process Group" process_model_id = "sample" process_model_identifier = f"{process_group_id}/{process_model_id}" - self.create_process_group( - client, with_super_admin_user, process_group_id, process_group_description - ) + self.create_process_group(client, with_super_admin_user, process_group_id, process_group_description) text = "Create a Bug Tracker process model " - text += ( - "with a Bug Details form that collects summary, description, and priority" - ) + text += "with a Bug Details form that collects summary, description, and priority" body = {"natural_language_text": text} self.create_process_model_with_api( client, @@ -215,18 +203,12 @@ class TestProcessApi(BaseTest): assert os.path.exists(process_model_diagram) form_schema_json = 
os.path.join(process_model_path, "bug-details-schema.json") assert os.path.exists(form_schema_json) - form_uischema_json = os.path.join( - process_model_path, "bug-details-uischema.json" - ) + form_uischema_json = os.path.join(process_model_path, "bug-details-uischema.json") assert os.path.exists(form_uischema_json) - process_instance_report = ProcessInstanceReportModel.query.filter_by( - identifier="bug-tracker" - ).first() + process_instance_report = ProcessInstanceReportModel.query.filter_by(identifier="bug-tracker").first() assert process_instance_report is not None - report_column_accessors = [ - i["accessor"] for i in process_instance_report.report_metadata["columns"] - ] + report_column_accessors = [i["accessor"] for i in process_instance_report.report_metadata["columns"]] expected_column_accessors = [ "id", "process_model_display_name", @@ -253,9 +235,7 @@ class TestProcessApi(BaseTest): process_model_identifier = f"{process_group_id}/{process_model_id}" initial_primary_process_id = "sample" terminal_primary_process_id = "new_process_id" - self.create_process_group( - client=client, user=with_super_admin_user, process_group_id=process_group_id - ) + self.create_process_group(client=client, user=with_super_admin_user, process_group_id=process_group_id) bpmn_file_name = f"{process_model_id}.bpmn" bpmn_file_source_directory = process_model_id @@ -266,15 +246,11 @@ class TestProcessApi(BaseTest): ) assert process_model.primary_process_id == initial_primary_process_id - bpmn_file_data_bytes = self.get_test_data_file_contents( - bpmn_file_name, bpmn_file_source_directory - ) + bpmn_file_data_bytes = self.get_test_data_file_contents(bpmn_file_name, bpmn_file_source_directory) bpmn_file_data_string = bpmn_file_data_bytes.decode("utf-8") old_string = f'bpmn:process id="{initial_primary_process_id}"' new_string = f'bpmn:process id="{terminal_primary_process_id}"' - updated_bpmn_file_data_string = bpmn_file_data_string.replace( - old_string, new_string - ) + updated_bpmn_file_data_string = bpmn_file_data_string.replace(old_string, new_string) updated_bpmn_file_data_bytes = bytearray(updated_bpmn_file_data_string, "utf-8") data = {"file": (io.BytesIO(updated_bpmn_file_data_bytes), bpmn_file_name)} @@ -303,9 +279,7 @@ class TestProcessApi(BaseTest): process_group_description = "Test Process Group" process_model_id = "sample" process_model_identifier = f"{process_group_id}/{process_model_id}" - self.create_process_group( - client, with_super_admin_user, process_group_id, process_group_description - ) + self.create_process_group(client, with_super_admin_user, process_group_id, process_group_description) self.create_process_model_with_api( client, process_model_id=process_model_identifier, @@ -342,12 +316,8 @@ class TestProcessApi(BaseTest): process_model_identifier = f"{test_process_group_id}/{test_process_model_id}" modified_process_model_identifier = process_model_identifier.replace("/", ":") self.create_process_group(client, with_super_admin_user, test_process_group_id) - self.create_process_model_with_api( - client, process_model_identifier, user=with_super_admin_user - ) - bpmn_file_data_bytes = self.get_test_data_file_contents( - bpmn_file_name, bpmn_file_location - ) + self.create_process_model_with_api(client, process_model_identifier, user=with_super_admin_user) + bpmn_file_data_bytes = self.get_test_data_file_contents(bpmn_file_name, bpmn_file_location) self.create_spec_file( client=client, process_model_id=process_model_identifier, @@ -390,9 +360,7 @@ class TestProcessApi(BaseTest): 
with_super_admin_user: UserModel, ) -> None: """Test_process_model_update.""" - self.create_process_group( - client, with_super_admin_user, "test_process_group", "Test Process Group" - ) + self.create_process_group(client, with_super_admin_user, "test_process_group", "Test Process Group") process_model_identifier = "test_process_group/make_cookies" self.create_process_model_with_api( client, @@ -408,9 +376,7 @@ class TestProcessApi(BaseTest): process_model.display_name = "Updated Display Name" process_model.primary_file_name = "superduper.bpmn" process_model.primary_process_id = "superduper" - process_model.metadata_extraction_paths = [ - {"key": "extraction1", "path": "path1"} - ] + process_model.metadata_extraction_paths = [{"key": "extraction1", "path": "path1"}] modified_process_model_identifier = process_model_identifier.replace("/", ":") response = client.put( @@ -424,9 +390,7 @@ class TestProcessApi(BaseTest): assert response.json["display_name"] == "Updated Display Name" assert response.json["primary_file_name"] == "superduper.bpmn" assert response.json["primary_process_id"] == "superduper" - assert response.json["metadata_extraction_paths"] == [ - {"key": "extraction1", "path": "path1"} - ] + assert response.json["metadata_extraction_paths"] == [{"key": "extraction1", "path": "path1"}] def test_process_model_list_all( self, @@ -582,9 +546,7 @@ class TestProcessApi(BaseTest): assert response.json is not None # We should get 5 back, as one of the items in the cache is a decision. assert len(response.json) == 5 - simple_form = next( - p for p in response.json if p["identifier"] == "Process_WithForm" - ) + simple_form = next(p for p in response.json if p["identifier"] == "Process_WithForm") assert simple_form["display_name"] == "Process With Form" assert simple_form["process_model_id"] == "test_group_one/simple_form" assert simple_form["has_lanes"] is False @@ -668,9 +630,7 @@ class TestProcessApi(BaseTest): group_id = "test_process_group" group_display_name = "Test Group" - self.create_process_group( - client, with_super_admin_user, group_id, display_name=group_display_name - ) + self.create_process_group(client, with_super_admin_user, group_id, display_name=group_display_name) process_group = ProcessModelService.get_process_group(group_id) assert process_group.display_name == group_display_name @@ -700,9 +660,7 @@ class TestProcessApi(BaseTest): for i in range(5): group_id = f"test_process_group_{i}" group_display_name = f"Test Group {i}" - self.create_process_group( - client, with_super_admin_user, group_id, display_name=group_display_name - ) + self.create_process_group(client, with_super_admin_user, group_id, display_name=group_display_name) # get all groups response = client.get( @@ -775,9 +733,7 @@ class TestProcessApi(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_process_model_file_update.""" - process_model_identifier = self.create_group_and_model_with_bpmn( - client, with_super_admin_user - ) + process_model_identifier = self.create_group_and_model_with_bpmn(client, with_super_admin_user) modified_process_model_id = process_model_identifier.replace("/", ":") data = {"key1": "THIS DATA"} @@ -801,9 +757,7 @@ class TestProcessApi(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_process_model_file_update.""" - process_model_identifier = self.create_group_and_model_with_bpmn( - client, with_super_admin_user - ) + process_model_identifier = self.create_group_and_model_with_bpmn(client, with_super_admin_user) modified_process_model_id = 
process_model_identifier.replace("/", ":") data = {"file": (io.BytesIO(b""), "random_fact.svg")} @@ -831,9 +785,7 @@ class TestProcessApi(BaseTest): process_group_description = "Test Group" process_model_id = "random_fact" process_model_identifier = f"{process_group_id}/{process_model_id}" - self.create_process_group( - client, with_super_admin_user, process_group_id, process_group_description - ) + self.create_process_group(client, with_super_admin_user, process_group_id, process_group_description) self.create_process_model_with_api( client, process_model_id=process_model_identifier, @@ -879,16 +831,12 @@ class TestProcessApi(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_process_model_file_update.""" - process_model_identifier = self.create_group_and_model_with_bpmn( - client, with_super_admin_user - ) + process_model_identifier = self.create_group_and_model_with_bpmn(client, with_super_admin_user) # self.create_spec_file(client, user=with_super_admin_user) # process_model = load_test_spec("random_fact") bad_process_model_identifier = f"x{process_model_identifier}" - modified_bad_process_model_identifier = bad_process_model_identifier.replace( - "/", ":" - ) + modified_bad_process_model_identifier = bad_process_model_identifier.replace("/", ":") response = client.delete( f"/v1.0/process-models/{modified_bad_process_model_identifier}/files/random_fact.svg", follow_redirects=True, @@ -907,9 +855,7 @@ class TestProcessApi(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_process_model_file_update.""" - process_model_identifier = self.create_group_and_model_with_bpmn( - client, with_super_admin_user - ) + process_model_identifier = self.create_group_and_model_with_bpmn(client, with_super_admin_user) modified_process_model_identifier = process_model_identifier.replace("/", ":") response = client.delete( @@ -929,12 +875,8 @@ class TestProcessApi(BaseTest): with_db_and_bpmn_file_cleanup: None, with_super_admin_user: UserModel, ) -> None: - process_model_identifier = self.create_group_and_model_with_bpmn( - client, with_super_admin_user - ) - process_model = ProcessModelService.get_process_model( - process_model_id=process_model_identifier - ) + process_model_identifier = self.create_group_and_model_with_bpmn(client, with_super_admin_user) + process_model = ProcessModelService.get_process_model(process_model_id=process_model_identifier) modified_process_model_identifier = process_model_identifier.replace("/", ":") response = client.delete( @@ -955,9 +897,7 @@ class TestProcessApi(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_process_model_file_update.""" - process_model_identifier = self.create_group_and_model_with_bpmn( - client, with_super_admin_user - ) + process_model_identifier = self.create_group_and_model_with_bpmn(client, with_super_admin_user) modified_process_model_identifier = process_model_identifier.replace("/", ":") self.create_spec_file( @@ -992,9 +932,7 @@ class TestProcessApi(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_get_file.""" - process_model_identifier = self.create_group_and_model_with_bpmn( - client, with_super_admin_user - ) + process_model_identifier = self.create_group_and_model_with_bpmn(client, with_super_admin_user) modified_process_model_identifier = process_model_identifier.replace("/", ":") response = client.get( @@ -1014,9 +952,7 @@ class TestProcessApi(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_get_workflow_from_workflow_spec.""" - process_model_identifier = 
self.create_group_and_model_with_bpmn( - client, with_super_admin_user - ) + process_model_identifier = self.create_group_and_model_with_bpmn(client, with_super_admin_user) modified_process_model_identifier = process_model_identifier.replace("/", ":") response = client.post( @@ -1071,9 +1007,7 @@ class TestProcessApi(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_get_process_group_when_found.""" - process_model_identifier = self.create_group_and_model_with_bpmn( - client, with_super_admin_user - ) + process_model_identifier = self.create_group_and_model_with_bpmn(client, with_super_admin_user) process_group_id, process_model_id = os.path.split(process_model_identifier) response = client.get( @@ -1119,9 +1053,7 @@ class TestProcessApi(BaseTest): assert response.status_code == 200 assert response.json is not None assert response.json["id"] == "test_group_one/test_group_two" - assert response.json["parent_groups"] == [ - {"display_name": "test_group_one", "id": "test_group_one"} - ] + assert response.json["parent_groups"] == [{"display_name": "test_group_one", "id": "test_group_one"}] def test_get_process_model_when_found( self, @@ -1146,9 +1078,7 @@ class TestProcessApi(BaseTest): assert response.json["id"] == process_model_identifier assert len(response.json["files"]) == 1 assert response.json["files"][0]["name"] == "random_fact.bpmn" - assert response.json["parent_groups"] == [ - {"display_name": "test_group", "id": "test_group"} - ] + assert response.json["parent_groups"] == [{"display_name": "test_group", "id": "test_group"}] def test_get_process_model_when_not_found( self, @@ -1180,9 +1110,7 @@ class TestProcessApi(BaseTest): """Test_process_instance_create.""" test_process_model_id = "runs_without_input/sample" headers = self.logged_in_headers(with_super_admin_user) - response = self.create_process_instance_from_process_model_id_with_api( - client, test_process_model_id, headers - ) + response = self.create_process_instance_from_process_model_id_with_api(client, test_process_model_id, headers) assert response.json is not None assert response.json["updated_at_in_seconds"] is not None assert response.json["status"] == "not_started" @@ -1244,9 +1172,7 @@ class TestProcessApi(BaseTest): process_group_id=process_group_id, process_model_id=process_model_id, ) - modified_process_model_identifier = ( - self.modify_process_identifier_for_path_param(process_model_identifier) - ) + modified_process_model_identifier = self.modify_process_identifier_for_path_param(process_model_identifier) headers = self.logged_in_headers(with_super_admin_user) create_response = self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, headers @@ -1264,9 +1190,7 @@ class TestProcessApi(BaseTest): assert show_response.json is not None assert show_response.status_code == 200 file_system_root = FileSystemService.root_path() - file_path = ( - f"{file_system_root}/{process_model_identifier}/{process_model_id}.bpmn" - ) + file_path = f"{file_system_root}/{process_model_identifier}/{process_model_id}.bpmn" with open(file_path) as f_open: xml_file_contents = f_open.read() assert show_response.json["bpmn_xml_file_contents"] == xml_file_contents @@ -1287,13 +1211,9 @@ class TestProcessApi(BaseTest): process_model_id=process_model_id, bpmn_file_location="call_activity_nested", ) - spec_reference = SpecReferenceCache.query.filter_by( - identifier="Level2b" - ).first() + spec_reference = SpecReferenceCache.query.filter_by(identifier="Level2b").first() assert spec_reference - 
modified_process_model_identifier = ( - self.modify_process_identifier_for_path_param(process_model_identifier) - ) + modified_process_model_identifier = self.modify_process_identifier_for_path_param(process_model_identifier) headers = self.logged_in_headers(with_super_admin_user) create_response = self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, headers @@ -1313,15 +1233,11 @@ class TestProcessApi(BaseTest): assert show_response.json is not None assert show_response.status_code == 200 file_system_root = FileSystemService.root_path() - process_instance_file_path = ( - f"{file_system_root}/{process_model_identifier}/{process_model_id}.bpmn" - ) + process_instance_file_path = f"{file_system_root}/{process_model_identifier}/{process_model_id}.bpmn" with open(process_instance_file_path) as f_open: xml_file_contents = f_open.read() assert show_response.json["bpmn_xml_file_contents"] != xml_file_contents - spec_reference_file_path = os.path.join( - file_system_root, spec_reference.relative_path - ) + spec_reference_file_path = os.path.join(file_system_root, spec_reference.relative_path) with open(spec_reference_file_path) as f_open: xml_file_contents = f_open.read() assert show_response.json["bpmn_xml_file_contents"] == xml_file_contents @@ -1366,9 +1282,7 @@ class TestProcessApi(BaseTest): assert json_data assert json_data["status"] == "complete" process_instance_id = json_data["id"] - process_instance = ProcessInstanceModel.query.filter_by( - id=process_instance_id - ).first() + process_instance = ProcessInstanceModel.query.filter_by(id=process_instance_id).first() assert process_instance processor = ProcessInstanceProcessor(process_instance) @@ -1418,9 +1332,7 @@ class TestProcessApi(BaseTest): ) assert response.json is not None - process_instance = ProcessInstanceModel.query.filter_by( - id=process_instance_id - ).first() + process_instance = ProcessInstanceModel.query.filter_by(id=process_instance_id).first() processor = ProcessInstanceProcessor(process_instance) processor.do_engine_steps(save=True) task = processor.get_all_user_tasks()[0] @@ -1439,18 +1351,14 @@ class TestProcessApi(BaseTest): f"/v1.0/messages/{message_model_identifier}", content_type="application/json", headers=self.logged_in_headers(with_super_admin_user), - data=json.dumps( - {"payload": payload, "process_instance_id": process_instance_id} - ), + data=json.dumps({"payload": payload, "process_instance_id": process_instance_id}), ) assert response.status_code == 200 json_data = response.json assert json_data assert json_data["status"] == "complete" process_instance_id = json_data["id"] - process_instance = ProcessInstanceModel.query.filter_by( - id=process_instance_id - ).first() + process_instance = ProcessInstanceModel.query.filter_by(id=process_instance_id).first() assert process_instance processor = ProcessInstanceProcessor(process_instance) @@ -1495,9 +1403,7 @@ class TestProcessApi(BaseTest): assert response.json is not None process_instance_id = response.json["id"] - process_instance = ProcessInstanceModel.query.filter_by( - id=process_instance_id - ).first() + process_instance = ProcessInstanceModel.query.filter_by(id=process_instance_id).first() processor = ProcessInstanceProcessor(process_instance) processor.do_engine_steps(save=True) task = processor.get_all_user_tasks()[0] @@ -1518,9 +1424,7 @@ class TestProcessApi(BaseTest): f"/v1.0/messages/{message_model_identifier}", content_type="application/json", headers=self.logged_in_headers(with_super_admin_user), - 
data=json.dumps( - {"payload": payload, "process_instance_id": process_instance_id} - ), + data=json.dumps({"payload": payload, "process_instance_id": process_instance_id}), ) assert response.status_code == 400 assert response.json @@ -1539,9 +1443,7 @@ class TestProcessApi(BaseTest): assert json_data assert json_data["status"] == "complete" process_instance_id = json_data["id"] - process_instance = ProcessInstanceModel.query.filter_by( - id=process_instance_id - ).first() + process_instance = ProcessInstanceModel.query.filter_by(id=process_instance_id).first() assert process_instance processor = ProcessInstanceProcessor(process_instance) process_instance_data = processor.get_data() @@ -1553,9 +1455,7 @@ class TestProcessApi(BaseTest): f"/v1.0/messages/{message_model_identifier}", content_type="application/json", headers=self.logged_in_headers(with_super_admin_user), - data=json.dumps( - {"payload": payload, "process_instance_id": process_instance_id} - ), + data=json.dumps({"payload": payload, "process_instance_id": process_instance_id}), ) assert response.status_code == 400 assert response.json @@ -1605,9 +1505,7 @@ class TestProcessApi(BaseTest): assert response.status_code == 200 assert response.json is not None - process_instance = ProcessInstanceModel.query.filter_by( - id=process_instance_id - ).first() + process_instance = ProcessInstanceModel.query.filter_by(id=process_instance_id).first() assert process_instance assert process_instance.status == "terminated" @@ -1687,9 +1585,7 @@ class TestProcessApi(BaseTest): assert response.json["next_task"] is not None human_tasks = ( - db.session.query(HumanTaskModel) - .filter(HumanTaskModel.process_instance_id == process_instance_id) - .all() + db.session.query(HumanTaskModel).filter(HumanTaskModel.process_instance_id == process_instance_id).all() ) assert len(human_tasks) == 1 human_task = human_tasks[0] @@ -1699,10 +1595,7 @@ class TestProcessApi(BaseTest): ) assert response.status_code == 200 assert response.json is not None - assert ( - response.json["form_schema"]["definitions"]["Color"]["anyOf"][1]["title"] - == "Green" - ) + assert response.json["form_schema"]["definitions"]["Color"]["anyOf"][1]["title"] == "Green" # if you set this in task data: # form_ui_hidden_fields = ["veryImportantFieldButOnlySometimes", "building.floor"] @@ -1732,9 +1625,7 @@ class TestProcessApi(BaseTest): ) headers = self.logged_in_headers(with_super_admin_user) - self.create_process_instance_from_process_model_id_with_api( - client, process_model_identifier, headers - ) + self.create_process_instance_from_process_model_id_with_api(client, process_model_identifier, headers) response = client.get( "/v1.0/process-instances", @@ -1749,10 +1640,7 @@ class TestProcessApi(BaseTest): process_instance_dict = response.json["results"][0] assert type(process_instance_dict["id"]) is int - assert ( - process_instance_dict["process_model_identifier"] - == process_model_identifier - ) + assert process_instance_dict["process_model_identifier"] == process_model_identifier assert type(process_instance_dict["start_in_seconds"]) is int assert process_instance_dict["start_in_seconds"] > 0 assert process_instance_dict["end_in_seconds"] is None @@ -1779,21 +1667,11 @@ class TestProcessApi(BaseTest): bpmn_file_location=bpmn_file_location, ) headers = self.logged_in_headers(with_super_admin_user) - self.create_process_instance_from_process_model_id_with_api( - client, process_model_identifier, headers - ) - self.create_process_instance_from_process_model_id_with_api( - client, 
process_model_identifier, headers - ) - self.create_process_instance_from_process_model_id_with_api( - client, process_model_identifier, headers - ) - self.create_process_instance_from_process_model_id_with_api( - client, process_model_identifier, headers - ) - self.create_process_instance_from_process_model_id_with_api( - client, process_model_identifier, headers - ) + self.create_process_instance_from_process_model_id_with_api(client, process_model_identifier, headers) + self.create_process_instance_from_process_model_id_with_api(client, process_model_identifier, headers) + self.create_process_instance_from_process_model_id_with_api(client, process_model_identifier, headers) + self.create_process_instance_from_process_model_id_with_api(client, process_model_identifier, headers) + self.create_process_instance_from_process_model_id_with_api(client, process_model_identifier, headers) response = client.get( "/v1.0/process-instances?per_page=2&page=3", @@ -2095,9 +1973,7 @@ class TestProcessApi(BaseTest): ) -> Any: """Setup_testing_instance.""" headers = self.logged_in_headers(with_super_admin_user) - response = self.create_process_instance_from_process_model_id_with_api( - client, process_model_id, headers - ) + response = self.create_process_instance_from_process_model_id_with_api(client, process_model_id, headers) process_instance = response.json assert isinstance(process_instance, dict) process_instance_id = process_instance["id"] @@ -2124,15 +2000,9 @@ class TestProcessApi(BaseTest): bpmn_file_location=bpmn_file_location, ) - process_instance_id = self.setup_testing_instance( - client, process_model_identifier, with_super_admin_user - ) + process_instance_id = self.setup_testing_instance(client, process_model_identifier, with_super_admin_user) - process = ( - db.session.query(ProcessInstanceModel) - .filter(ProcessInstanceModel.id == process_instance_id) - .first() - ) + process = db.session.query(ProcessInstanceModel).filter(ProcessInstanceModel.id == process_instance_id).first() assert process is not None assert process.status == "not_started" @@ -2144,16 +2014,9 @@ class TestProcessApi(BaseTest): api_error = json.loads(response.get_data(as_text=True)) assert api_error["error_code"] == "task_error" - assert ( - 'TypeError:can only concatenate str (not "int") to str' - in api_error["message"] - ) + assert 'TypeError:can only concatenate str (not "int") to str' in api_error["message"] - process = ( - db.session.query(ProcessInstanceModel) - .filter(ProcessInstanceModel.id == process_instance_id) - .first() - ) + process = db.session.query(ProcessInstanceModel).filter(ProcessInstanceModel.id == process_instance_id).first() assert process is not None assert process.status == "error" @@ -2178,20 +2041,14 @@ class TestProcessApi(BaseTest): bpmn_file_location=bpmn_file_location, ) - process_instance_id = self.setup_testing_instance( - client, process_model_identifier, with_super_admin_user - ) + process_instance_id = self.setup_testing_instance(client, process_model_identifier, with_super_admin_user) process_model = ProcessModelService.get_process_model(process_model_identifier) ProcessModelService.update_process_model( process_model, {"fault_or_suspend_on_exception": NotificationType.suspend.value}, ) - process = ( - db.session.query(ProcessInstanceModel) - .filter(ProcessInstanceModel.id == process_instance_id) - .first() - ) + process = db.session.query(ProcessInstanceModel).filter(ProcessInstanceModel.id == process_instance_id).first() assert process is not None assert process.status == 
"not_started" @@ -2201,11 +2058,7 @@ class TestProcessApi(BaseTest): ) assert response.status_code == 400 - process = ( - db.session.query(ProcessInstanceModel) - .filter(ProcessInstanceModel.id == process_instance_id) - .first() - ) + process = db.session.query(ProcessInstanceModel).filter(ProcessInstanceModel.id == process_instance_id).first() assert process is not None assert process.status == "suspended" @@ -2238,9 +2091,7 @@ class TestProcessApi(BaseTest): assert response.status_code == 400 assert process_instance.status == "error" processor = ProcessInstanceProcessor(process_instance) - spiff_task = processor.get_task_by_bpmn_identifier( - "script_task_two", processor.bpmn_process_instance - ) + spiff_task = processor.get_task_by_bpmn_identifier("script_task_two", processor.bpmn_process_instance) assert spiff_task is not None assert spiff_task.data == {"my_var": "THE VAR"} @@ -2338,13 +2189,8 @@ class TestProcessApi(BaseTest): ) assert response.status_code == 200 assert response.json is not None - assert ( - len(response.json["results"]) == 2 - ) # Two messages, one is the completed receive, the other is new send - assert ( - response.json["results"][0]["process_instance_id"] - == process_instance_id_one - ) + assert len(response.json["results"]) == 2 # Two messages, one is the completed receive, the other is new send + assert response.json["results"][0]["process_instance_id"] == process_instance_id_one response = client.get( f"/v1.0/messages?process_instance_id={process_instance_id_two}", @@ -2353,10 +2199,7 @@ class TestProcessApi(BaseTest): assert response.status_code == 200 assert response.json is not None assert len(response.json["results"]) == 2 - assert ( - response.json["results"][0]["process_instance_id"] - == process_instance_id_two - ) + assert response.json["results"][0]["process_instance_id"] == process_instance_id_two response = client.get( "/v1.0/messages", @@ -2604,9 +2447,7 @@ class TestProcessApi(BaseTest): bpmn_file_location=bpmn_file_location, ) - bpmn_file_data_bytes = self.get_test_data_file_contents( - bpmn_file_name, bpmn_file_location - ) + bpmn_file_data_bytes = self.get_test_data_file_contents(bpmn_file_name, bpmn_file_location) self.create_spec_file( client=client, process_model_id=process_model_identifier, @@ -2628,27 +2469,21 @@ class TestProcessApi(BaseTest): headers=self.logged_in_headers(with_super_admin_user), ) - process_instance = ProcessInstanceService().get_process_instance( - process_instance_id - ) + process_instance = ProcessInstanceService().get_process_instance(process_instance_id) assert process_instance.status == "user_input_required" client.post( f"/v1.0/process-instance-suspend/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}", headers=self.logged_in_headers(with_super_admin_user), ) - process_instance = ProcessInstanceService().get_process_instance( - process_instance_id - ) + process_instance = ProcessInstanceService().get_process_instance(process_instance_id) assert process_instance.status == "suspended" response = client.post( f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/run", headers=self.logged_in_headers(with_super_admin_user), ) - process_instance = ProcessInstanceService().get_process_instance( - process_instance_id - ) + process_instance = ProcessInstanceService().get_process_instance(process_instance_id) assert process_instance.status == "suspended" assert response.status_code == 400 @@ -2657,9 +2492,7 @@ 
class TestProcessApi(BaseTest): headers=self.logged_in_headers(with_super_admin_user), ) assert response.status_code == 200 - process_instance = ProcessInstanceService().get_process_instance( - process_instance_id - ) + process_instance = ProcessInstanceService().get_process_instance(process_instance_id) assert process_instance.status == "waiting" def test_script_unit_test_run( @@ -2683,9 +2516,7 @@ class TestProcessApi(BaseTest): bpmn_file_location=bpmn_file_location, ) - bpmn_file_data_bytes = self.get_test_data_file_contents( - bpmn_file_name, bpmn_file_location - ) + bpmn_file_data_bytes = self.get_test_data_file_contents(bpmn_file_name, bpmn_file_location) self.create_spec_file( client=client, process_model_id=process_model_identifier, @@ -2741,9 +2572,7 @@ class TestProcessApi(BaseTest): bpmn_file_location=bpmn_file_location, ) - bpmn_file_data_bytes = self.get_test_data_file_contents( - bpmn_file_name, bpmn_file_location - ) + bpmn_file_data_bytes = self.get_test_data_file_contents(bpmn_file_name, bpmn_file_location) self.create_spec_file( client=client, process_model_id=process_model_identifier, @@ -2817,9 +2646,7 @@ class TestProcessApi(BaseTest): bpmn_file_location=bpmn_file_location, ) - bpmn_file_data_bytes = self.get_test_data_file_contents( - bpmn_file_name, bpmn_file_location - ) + bpmn_file_data_bytes = self.get_test_data_file_contents(bpmn_file_name, bpmn_file_location) self.create_spec_file( client=client, process_model_id=process_model_identifier, @@ -2868,16 +2695,12 @@ class TestProcessApi(BaseTest): ) assert response.json["status"] == "suspended" - def setup_initial_groups_for_move_tests( - self, client: FlaskClient, with_super_admin_user: UserModel - ) -> None: + def setup_initial_groups_for_move_tests(self, client: FlaskClient, with_super_admin_user: UserModel) -> None: """Setup_initial_groups_for_move_tests.""" groups = ["group_a", "group_b", "group_b/group_bb"] # setup initial groups for group in groups: - self.create_process_group( - client, with_super_admin_user, group, display_name=group - ) + self.create_process_group(client, with_super_admin_user, group, display_name=group) # make sure initial groups exist for group in groups: persisted = ProcessModelService.get_process_group(group) @@ -2913,9 +2736,7 @@ class TestProcessApi(BaseTest): # move model to `group_b/group_bb` new_location = "group_b/group_bb" new_process_model_path = f"{new_location}/{process_model_id}" - modified_original_process_model_id = original_process_model_path.replace( - "/", ":" - ) + modified_original_process_model_id = original_process_model_path.replace("/", ":") response = client.put( f"/v1.0/process-models/{modified_original_process_model_id}/move?new_location={new_location}", @@ -2930,9 +2751,7 @@ class TestProcessApi(BaseTest): assert e.value.args[0] == "process_model_not_found" # make sure the new model does exist - new_process_model = ProcessModelService.get_process_model( - new_process_model_path - ) + new_process_model = ProcessModelService.get_process_model(new_process_model_path) assert new_process_model is not None assert new_process_model.id == new_process_model_path @@ -2950,9 +2769,7 @@ class TestProcessApi(BaseTest): sub_group_id = "sub_group" original_location = "group_a" original_sub_path = f"{original_location}/{sub_group_id}" - self.create_process_group( - client, with_super_admin_user, original_sub_path, display_name=sub_group_id - ) + self.create_process_group(client, with_super_admin_user, original_sub_path, display_name=sub_group_id) # make sure original subgroup 
exists persisted = ProcessModelService.get_process_group(original_sub_path) assert persisted is not None @@ -3111,9 +2928,7 @@ class TestProcessApi(BaseTest): ) -> None: """Test_can_get_process_instance_list_with_report_metadata.""" process_model = load_test_spec( - process_model_id=( - "save_process_instance_metadata/save_process_instance_metadata" - ), + process_model_id="save_process_instance_metadata/save_process_instance_metadata", bpmn_file_name="save_process_instance_metadata.bpmn", process_model_source_directory="save_process_instance_metadata", ) @@ -3172,21 +2987,13 @@ class TestProcessApi(BaseTest): user_one = self.create_user_with_permission(username="user_one") process_model = load_test_spec( - process_model_id=( - "save_process_instance_metadata/save_process_instance_metadata" - ), + process_model_id="save_process_instance_metadata/save_process_instance_metadata", bpmn_file_name="save_process_instance_metadata.bpmn", process_model_source_directory="save_process_instance_metadata", ) - self.create_process_instance_from_process_model( - process_model=process_model, user=user_one - ) - self.create_process_instance_from_process_model( - process_model=process_model, user=user_one - ) - self.create_process_instance_from_process_model( - process_model=process_model, user=with_super_admin_user - ) + self.create_process_instance_from_process_model(process_model=process_model, user=user_one) + self.create_process_instance_from_process_model(process_model=process_model, user=user_one) + self.create_process_instance_from_process_model(process_model=process_model, user=with_super_admin_user) dne_report_metadata = { "columns": [ @@ -3224,12 +3031,10 @@ class TestProcessApi(BaseTest): report_metadata=dne_report_metadata, user=user_one, ) - process_instance_report_user_one = ( - ProcessInstanceReportModel.create_with_attributes( - identifier="user_one_report", - report_metadata=user_one_report_metadata, - user=user_one, - ) + process_instance_report_user_one = ProcessInstanceReportModel.create_with_attributes( + identifier="user_one_report", + report_metadata=user_one_report_metadata, + user=user_one, ) response = client.get( @@ -3239,14 +3044,8 @@ class TestProcessApi(BaseTest): assert response.json is not None assert response.status_code == 200 assert len(response.json["results"]) == 2 - assert ( - response.json["results"][0]["process_initiator_username"] - == user_one.username - ) - assert ( - response.json["results"][1]["process_initiator_username"] - == user_one.username - ) + assert response.json["results"][0]["process_initiator_username"] == user_one.username + assert response.json["results"][1]["process_initiator_username"] == user_one.username response = client.get( f"/v1.0/process-instances?report_identifier={process_instance_report_dne.identifier}", @@ -3265,9 +3064,7 @@ class TestProcessApi(BaseTest): ) -> None: """Test_can_get_process_instance_list_with_report_metadata.""" process_model = load_test_spec( - process_model_id=( - "save_process_instance_metadata/save_process_instance_metadata" - ), + process_model_id="save_process_instance_metadata/save_process_instance_metadata", bpmn_file_name="save_process_instance_metadata.bpmn", process_model_source_directory="save_process_instance_metadata", ) @@ -3317,9 +3114,7 @@ class TestProcessApi(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_process_instance_list_can_order_by_metadata.""" - self.create_process_group( - client, with_super_admin_user, "test_group", "test_group" - ) + self.create_process_group(client, 
with_super_admin_user, "test_group", "test_group") process_model = load_test_spec( "test_group/hello_world", process_model_source_directory="nested-task-data-structure", @@ -3333,15 +3128,11 @@ class TestProcessApi(BaseTest): }, ) - process_instance_one = self.create_process_instance_from_process_model( - process_model - ) + process_instance_one = self.create_process_instance_from_process_model(process_model) processor = ProcessInstanceProcessor(process_instance_one) processor.do_engine_steps(save=True) assert process_instance_one.status == "complete" - process_instance_two = self.create_process_instance_from_process_model( - process_model - ) + process_instance_two = self.create_process_instance_from_process_model(process_model) processor = ProcessInstanceProcessor(process_instance_two) processor.do_engine_steps(save=True) assert process_instance_two.status == "complete" @@ -3405,9 +3196,7 @@ class TestProcessApi(BaseTest): "test_group/data_object_test", process_model_source_directory="data_object_test", ) - process_instance_one = self.create_process_instance_from_process_model( - process_model - ) + process_instance_one = self.create_process_instance_from_process_model(process_model) processor = ProcessInstanceProcessor(process_instance_one) processor.do_engine_steps(save=True) assert process_instance_one.status == "user_input_required" diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_instances_controller.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_instances_controller.py index 8cb1768a..4ddb38d3 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_instances_controller.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_instances_controller.py @@ -18,21 +18,15 @@ class TestProcessInstancesController(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_user_search_returns_a_user.""" - user_one = self.create_user_with_permission( - username="user_one", target_uri="/process-instances/find-by-id/*" - ) - user_two = self.create_user_with_permission( - username="user_two", target_uri="/process-instances/find-by-id/*" - ) + user_one = self.create_user_with_permission(username="user_one", target_uri="/process-instances/find-by-id/*") + user_two = self.create_user_with_permission(username="user_two", target_uri="/process-instances/find-by-id/*") process_model = load_test_spec( process_model_id="group/sample", bpmn_file_name="sample.bpmn", process_model_source_directory="sample", ) - process_instance = self.create_process_instance_from_process_model( - process_model=process_model, user=user_one - ) + process_instance = self.create_process_instance_from_process_model(process_model=process_model, user=user_one) response = client.get( f"/v1.0/process-instances/find-by-id/{process_instance.id}", diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_secret_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_secret_service.py index 8d11fa49..3e19607d 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_secret_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_secret_service.py @@ -32,9 +32,7 @@ class SecretServiceTestHelpers(BaseTest): """Add_test_secret.""" return SecretService().add_secret(self.test_key, self.test_value, user.id) - def add_test_process( - self, client: FlaskClient, user: UserModel - ) -> ProcessModelInfo: + def 
add_test_process(self, client: FlaskClient, user: UserModel) -> ProcessModelInfo: """Add_test_process.""" self.create_process_group( client, @@ -42,9 +40,7 @@ class SecretServiceTestHelpers(BaseTest): self.test_process_group_id, display_name=self.test_process_group_display_name, ) - process_model_identifier = ( - f"{self.test_process_group_id}/{self.test_process_model_id}" - ) + process_model_identifier = f"{self.test_process_group_id}/{self.test_process_model_id}" self.create_process_model_with_api( client, process_model_id=process_model_identifier, @@ -52,9 +48,7 @@ class SecretServiceTestHelpers(BaseTest): process_model_description=self.test_process_model_description, user=user, ) - process_model_info = ProcessModelService.get_process_model( - process_model_identifier - ) + process_model_info = ProcessModelService.get_process_model(process_model_identifier) return process_model_info @@ -124,14 +118,10 @@ class TestSecretService(SecretServiceTestHelpers): secret = SecretService.get_secret(self.test_key) assert secret assert SecretService._decrypt(secret.value) == self.test_value - SecretService.update_secret( - self.test_key, "new_secret_value", with_super_admin_user.id - ) + SecretService.update_secret(self.test_key, "new_secret_value", with_super_admin_user.id) new_secret = SecretService.get_secret(self.test_key) assert new_secret - assert ( - SecretService._decrypt(new_secret.value) == "new_secret_value" - ) # noqa: S105 + assert SecretService._decrypt(new_secret.value) == "new_secret_value" # noqa: S105 def test_update_secret_bad_secret_fails( self, @@ -143,9 +133,7 @@ class TestSecretService(SecretServiceTestHelpers): """Test_update_secret_bad_secret_fails.""" secret = self.add_test_secret(with_super_admin_user) with pytest.raises(ApiError) as ae: - SecretService.update_secret( - secret.key + "x", "some_new_value", with_super_admin_user.id - ) + SecretService.update_secret(secret.key + "x", "some_new_value", with_super_admin_user.id) assert "Resource does not exist" in ae.value.message assert ae.value.error_code == "update_secret_error" @@ -253,9 +241,7 @@ class TestSecretServiceApi(SecretServiceTestHelpers): ) assert response.status_code == 200 - secret_model = SecretModel.query.filter( - SecretModel.key == self.test_key - ).first() + secret_model = SecretModel.query.filter(SecretModel.key == self.test_key).first() assert SecretService._decrypt(secret_model.value) == "new_secret_value" def test_delete_secret( diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_all_permissions.py b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_all_permissions.py index 9dd416fc..e72182bf 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_all_permissions.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_all_permissions.py @@ -70,7 +70,5 @@ class TestGetAllPermissions(BaseTest): permissions = GetAllPermissions().run(script_attributes_context) sorted_permissions = sorted(permissions, key=itemgetter("uri")) - sorted_expected_permissions = sorted( - expected_permissions, key=itemgetter("uri") - ) + sorted_expected_permissions = sorted(expected_permissions, key=itemgetter("uri")) assert sorted_permissions == sorted_expected_permissions diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_group_members.py b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_group_members.py index 6e57b1bf..3a128cff 100644 --- 
a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_group_members.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_group_members.py @@ -38,9 +38,7 @@ class TestGetGroupMembers(BaseTest): UserService.add_user_to_group(testuser2, group_a) UserService.add_user_to_group(testuser3, group_b) - self.create_process_group( - client, with_super_admin_user, "test_group", "test_group" - ) + self.create_process_group(client, with_super_admin_user, "test_group", "test_group") process_model = load_test_spec( process_model_id="test_group/get_group_members", bpmn_file_name="get_group_members.bpmn", diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_last_user_completing_task.py b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_last_user_completing_task.py index d6533eae..5f0e40d3 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_last_user_completing_task.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_last_user_completing_task.py @@ -23,9 +23,7 @@ class TestGetLastUserCompletingTask(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_sets_permission_correctly_on_human_task.""" - self.create_process_group( - client, with_super_admin_user, "test_group", "test_group" - ) + self.create_process_group(client, with_super_admin_user, "test_group", "test_group") initiator_user = self.find_or_create_user("initiator_user") assert initiator_user.principal is not None AuthorizationService.import_permissions_from_yaml_file() @@ -49,21 +47,14 @@ class TestGetLastUserCompletingTask(BaseTest): spiff_task = processor.__class__.get_task_by_bpmn_identifier( human_task.task_name, processor.bpmn_process_instance ) - ProcessInstanceService.complete_form_task( - processor, spiff_task, {"name": "HEY"}, initiator_user, human_task - ) + ProcessInstanceService.complete_form_task(processor, spiff_task, {"name": "HEY"}, initiator_user, human_task) assert len(process_instance.active_human_tasks) == 1 human_task = process_instance.active_human_tasks[0] spiff_task = processor.__class__.get_task_by_bpmn_identifier( human_task.task_name, processor.bpmn_process_instance ) - ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, initiator_user, human_task - ) + ProcessInstanceService.complete_form_task(processor, spiff_task, {}, initiator_user, human_task) assert spiff_task is not None - assert ( - initiator_user.username - == spiff_task.get_data("user_completing_task")["username"] - ) + assert initiator_user.username == spiff_task.get_data("user_completing_task")["username"] diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_localtime.py b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_localtime.py index 8116ec42..31d2aa69 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_localtime.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_localtime.py @@ -54,9 +54,7 @@ class TestGetLocaltime(BaseTest): target_uri="/v1.0/process-groups", permission_names=["read", "create"], ) - self.create_process_group( - client=client, user=initiator_user, process_group_id="test_group" - ) + self.create_process_group(client=client, user=initiator_user, process_group_id="test_group") process_model = load_test_spec( process_model_id="test_group/get_localtime", bpmn_file_name="get_localtime.bpmn", diff --git 
a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_process_initiator_user.py b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_process_initiator_user.py index 1a3cd92c..84ac7c27 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_process_initiator_user.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_process_initiator_user.py @@ -23,9 +23,7 @@ class TestGetProcessInitiatorUser(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_sets_permission_correctly_on_human_task.""" - self.create_process_group( - client, with_super_admin_user, "test_group", "test_group" - ) + self.create_process_group(client, with_super_admin_user, "test_group", "test_group") initiator_user = self.find_or_create_user("initiator_user") assert initiator_user.principal is not None AuthorizationService.import_permissions_from_yaml_file() @@ -49,12 +47,7 @@ class TestGetProcessInitiatorUser(BaseTest): spiff_task = processor.__class__.get_task_by_bpmn_identifier( human_task.task_name, processor.bpmn_process_instance ) - ProcessInstanceService.complete_form_task( - processor, spiff_task, {"name": "HEY"}, initiator_user, human_task - ) + ProcessInstanceService.complete_form_task(processor, spiff_task, {"name": "HEY"}, initiator_user, human_task) assert spiff_task is not None - assert ( - initiator_user.username - == spiff_task.get_data("process_initiator_user")["username"] - ) + assert initiator_user.username == spiff_task.get_data("process_initiator_user")["username"] diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_save_process_instance_metadata.py b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_save_process_instance_metadata.py index 738896cd..d0202a64 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_save_process_instance_metadata.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_save_process_instance_metadata.py @@ -24,13 +24,9 @@ class TestSaveProcessInstanceMetadata(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_can_save_process_instance_metadata.""" - self.create_process_group( - client, with_super_admin_user, "test_group", "test_group" - ) + self.create_process_group(client, with_super_admin_user, "test_group", "test_group") process_model = load_test_spec( - process_model_id=( - "save_process_instance_metadata/save_process_instance_metadata" - ), + process_model_id="save_process_instance_metadata/save_process_instance_metadata", bpmn_file_name="save_process_instance_metadata.bpmn", process_model_source_directory="save_process_instance_metadata", ) diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_acceptance_test_fixtures.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_acceptance_test_fixtures.py index e4eeecf1..b4dd66e0 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_acceptance_test_fixtures.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_acceptance_test_fixtures.py @@ -13,15 +13,11 @@ from spiffworkflow_backend.services.process_model_service import ProcessModelSer def test_start_dates_are_one_hour_apart(app: Flask) -> None: """Test_start_dates_are_one_hour_apart.""" - process_model_identifier = ( - "misc/acceptance-tests-group-one/acceptance-tests-model-1" - ) + process_model_identifier = "misc/acceptance-tests-group-one/acceptance-tests-model-1" group_identifier = os.path.dirname(process_model_identifier) 
parent_group_identifier = os.path.dirname(group_identifier) if not ProcessModelService.is_process_group(parent_group_identifier): - process_group = ProcessGroup( - id=parent_group_identifier, display_name=parent_group_identifier - ) + process_group = ProcessGroup(id=parent_group_identifier, display_name=parent_group_identifier) ProcessModelService.add_process_group(process_group) if not ProcessModelService.is_process_group(group_identifier): process_group = ProcessGroup(id=group_identifier, display_name=group_identifier) @@ -38,6 +34,4 @@ def test_start_dates_are_one_hour_apart(app: Flask) -> None: assert len(process_instances) > 2 assert process_instances[0].start_in_seconds is not None assert process_instances[1].start_in_seconds is not None - assert (process_instances[0].start_in_seconds - 3600) == ( - process_instances[1].start_in_seconds - ) + assert (process_instances[0].start_in_seconds - 3600) == (process_instances[1].start_in_seconds) diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_authorization_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_authorization_service.py index 01b1cc3f..e13ad390 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_authorization_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_authorization_service.py @@ -23,24 +23,16 @@ from spiffworkflow_backend.services.user_service import UserService class TestAuthorizationService(BaseTest): """TestAuthorizationService.""" - def test_can_raise_if_missing_user( - self, app: Flask, with_db_and_bpmn_file_cleanup: None - ) -> None: + def test_can_raise_if_missing_user(self, app: Flask, with_db_and_bpmn_file_cleanup: None) -> None: """Test_can_raise_if_missing_user.""" with pytest.raises(UserNotFoundError): - AuthorizationService.import_permissions_from_yaml_file( - raise_if_missing_user=True - ) + AuthorizationService.import_permissions_from_yaml_file(raise_if_missing_user=True) - def test_does_not_fail_if_user_not_created( - self, app: Flask, with_db_and_bpmn_file_cleanup: None - ) -> None: + def test_does_not_fail_if_user_not_created(self, app: Flask, with_db_and_bpmn_file_cleanup: None) -> None: """Test_does_not_fail_if_user_not_created.""" AuthorizationService.import_permissions_from_yaml_file() - def test_can_import_permissions_from_yaml( - self, app: Flask, with_db_and_bpmn_file_cleanup: None - ) -> None: + def test_can_import_permissions_from_yaml(self, app: Flask, with_db_and_bpmn_file_cleanup: None) -> None: """Test_can_import_permissions_from_yaml.""" usernames = [ "testadmin1", @@ -57,42 +49,22 @@ class TestAuthorizationService(BaseTest): AuthorizationService.import_permissions_from_yaml_file() assert len(users["testadmin1"].groups) == 2 - testadmin1_group_identifiers = sorted( - [g.identifier for g in users["testadmin1"].groups] - ) + testadmin1_group_identifiers = sorted([g.identifier for g in users["testadmin1"].groups]) assert testadmin1_group_identifiers == ["admin", "everybody"] assert len(users["testuser1"].groups) == 2 - testuser1_group_identifiers = sorted( - [g.identifier for g in users["testuser1"].groups] - ) + testuser1_group_identifiers = sorted([g.identifier for g in users["testuser1"].groups]) assert testuser1_group_identifiers == ["Finance Team", "everybody"] assert len(users["testuser2"].groups) == 3 - self.assert_user_has_permission( - users["testuser1"], "update", "/v1.0/process-groups/finance/model1" - ) - self.assert_user_has_permission( - users["testuser1"], "update", 
"/v1.0/process-groups/finance/" - ) - self.assert_user_has_permission( - users["testuser1"], "update", "/v1.0/process-groups/", expected_result=False - ) - self.assert_user_has_permission( - users["testuser4"], "update", "/v1.0/process-groups/finance/model1" - ) + self.assert_user_has_permission(users["testuser1"], "update", "/v1.0/process-groups/finance/model1") + self.assert_user_has_permission(users["testuser1"], "update", "/v1.0/process-groups/finance/") + self.assert_user_has_permission(users["testuser1"], "update", "/v1.0/process-groups/", expected_result=False) + self.assert_user_has_permission(users["testuser4"], "update", "/v1.0/process-groups/finance/model1") # via the user, not the group - self.assert_user_has_permission( - users["testuser4"], "read", "/v1.0/process-groups/finance/model1" - ) - self.assert_user_has_permission( - users["testuser2"], "update", "/v1.0/process-groups/finance/model1" - ) - self.assert_user_has_permission( - users["testuser2"], "update", "/v1.0/process-groups/", expected_result=False - ) - self.assert_user_has_permission( - users["testuser2"], "read", "/v1.0/process-groups/" - ) + self.assert_user_has_permission(users["testuser4"], "read", "/v1.0/process-groups/finance/model1") + self.assert_user_has_permission(users["testuser2"], "update", "/v1.0/process-groups/finance/model1") + self.assert_user_has_permission(users["testuser2"], "update", "/v1.0/process-groups/", expected_result=False) + self.assert_user_has_permission(users["testuser2"], "read", "/v1.0/process-groups/") def test_user_can_be_added_to_human_task_on_first_login( self, @@ -117,9 +89,7 @@ class TestAuthorizationService(BaseTest): bpmn_file_location="model_with_lanes", ) - process_model = ProcessModelService.get_process_model( - process_model_id=process_model_identifier - ) + process_model = ProcessModelService.get_process_model(process_model_id=process_model_identifier) process_instance = self.create_process_instance_from_process_model( process_model=process_model, user=initiator_user ) @@ -129,9 +99,7 @@ class TestAuthorizationService(BaseTest): spiff_task = processor.__class__.get_task_by_bpmn_identifier( human_task.task_name, processor.bpmn_process_instance ) - ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, initiator_user, human_task - ) + ProcessInstanceService.complete_form_task(processor, spiff_task, {}, initiator_user, human_task) human_task = process_instance.active_human_tasks[0] spiff_task = processor.__class__.get_task_by_bpmn_identifier( @@ -145,9 +113,7 @@ class TestAuthorizationService(BaseTest): "email": "testuser2", } ) - ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, finance_user, human_task - ) + ProcessInstanceService.complete_form_task(processor, spiff_task, {}, finance_user, human_task) def test_explode_permissions_all_on_process_group( self, @@ -196,9 +162,7 @@ class TestAuthorizationService(BaseTest): permissions_to_assign = AuthorizationService.explode_permissions( "all", "PG:/some-process-group/some-process-model" ) - permissions_to_assign_tuples = sorted( - [(p.target_uri, p.permission) for p in permissions_to_assign] - ) + permissions_to_assign_tuples = sorted([(p.target_uri, p.permission) for p in permissions_to_assign]) assert permissions_to_assign_tuples == expected_permissions def test_explode_permissions_start_on_process_group( @@ -226,9 +190,7 @@ class TestAuthorizationService(BaseTest): permissions_to_assign = AuthorizationService.explode_permissions( "start", "PG:/some-process-group/some-process-model" 
) - permissions_to_assign_tuples = sorted( - [(p.target_uri, p.permission) for p in permissions_to_assign] - ) + permissions_to_assign_tuples = sorted([(p.target_uri, p.permission) for p in permissions_to_assign]) assert permissions_to_assign_tuples == expected_permissions def test_explode_permissions_all_on_process_model( @@ -274,9 +236,7 @@ class TestAuthorizationService(BaseTest): permissions_to_assign = AuthorizationService.explode_permissions( "all", "PM:/some-process-group/some-process-model" ) - permissions_to_assign_tuples = sorted( - [(p.target_uri, p.permission) for p in permissions_to_assign] - ) + permissions_to_assign_tuples = sorted([(p.target_uri, p.permission) for p in permissions_to_assign]) assert permissions_to_assign_tuples == expected_permissions def test_explode_permissions_start_on_process_model( @@ -304,9 +264,7 @@ class TestAuthorizationService(BaseTest): permissions_to_assign = AuthorizationService.explode_permissions( "start", "PM:/some-process-group/some-process-model" ) - permissions_to_assign_tuples = sorted( - [(p.target_uri, p.permission) for p in permissions_to_assign] - ) + permissions_to_assign_tuples = sorted([(p.target_uri, p.permission) for p in permissions_to_assign]) assert permissions_to_assign_tuples == expected_permissions def test_explode_permissions_basic( @@ -332,9 +290,7 @@ class TestAuthorizationService(BaseTest): ("/user-groups/for-current-user", "read"), ] permissions_to_assign = AuthorizationService.explode_permissions("all", "BASIC") - permissions_to_assign_tuples = sorted( - [(p.target_uri, p.permission) for p in permissions_to_assign] - ) + permissions_to_assign_tuples = sorted([(p.target_uri, p.permission) for p in permissions_to_assign]) assert permissions_to_assign_tuples == expected_permissions def test_explode_permissions_all( @@ -351,9 +307,7 @@ class TestAuthorizationService(BaseTest): ("/*", "update"), ] permissions_to_assign = AuthorizationService.explode_permissions("all", "ALL") - permissions_to_assign_tuples = sorted( - [(p.target_uri, p.permission) for p in permissions_to_assign] - ) + permissions_to_assign_tuples = sorted([(p.target_uri, p.permission) for p in permissions_to_assign]) assert permissions_to_assign_tuples == expected_permissions def test_explode_permissions_with_target_uri( @@ -369,12 +323,8 @@ class TestAuthorizationService(BaseTest): ("/hey/model", "read"), ("/hey/model", "update"), ] - permissions_to_assign = AuthorizationService.explode_permissions( - "all", "/hey/model" - ) - permissions_to_assign_tuples = sorted( - [(p.target_uri, p.permission) for p in permissions_to_assign] - ) + permissions_to_assign = AuthorizationService.explode_permissions("all", "/hey/model") + permissions_to_assign_tuples = sorted([(p.target_uri, p.permission) for p in permissions_to_assign]) assert permissions_to_assign_tuples == expected_permissions def test_granting_access_to_group_gives_access_to_group_and_subgroups( @@ -387,9 +337,7 @@ class TestAuthorizationService(BaseTest): user = self.find_or_create_user(username="user_one") user_group = GroupService.find_or_create_group("group_one") UserService.add_user_to_group(user, user_group) - AuthorizationService.add_permission_from_uri_or_macro( - user_group.identifier, "read", "PG:hey" - ) + AuthorizationService.add_permission_from_uri_or_macro(user_group.identifier, "read", "PG:hey") self.assert_user_has_permission(user, "read", "/v1.0/process-groups/hey") self.assert_user_has_permission(user, "read", "/v1.0/process-groups/hey:yo") @@ -453,17 +401,11 @@ class 
TestAuthorizationService(BaseTest): self.assert_user_has_permission(user_two, "read", "/v1.0/process-groups/hey") self.assert_user_has_permission(user_two, "read", "/v1.0/process-groups/hey:yo") - self.assert_user_has_permission( - user_two, "create", "/v1.0/process-groups/hey:yo" - ) + self.assert_user_has_permission(user_two, "create", "/v1.0/process-groups/hey:yo") assert GroupModel.query.filter_by(identifier="group_three").first() is not None self.assert_user_has_permission(user_two, "read", "/v1.0/process-groups/hey2") - self.assert_user_has_permission( - user_two, "read", "/v1.0/process-groups/hey2:yo" - ) - self.assert_user_has_permission( - user_two, "create", "/v1.0/process-groups/hey2:yo" - ) + self.assert_user_has_permission(user_two, "read", "/v1.0/process-groups/hey2:yo") + self.assert_user_has_permission(user_two, "create", "/v1.0/process-groups/hey2:yo") # remove access to 'hey' from user_two group_info = [ @@ -482,19 +424,11 @@ class TestAuthorizationService(BaseTest): assert GroupModel.query.filter_by(identifier="group_one").first() is not None self.assert_user_has_permission(user, "read", "/v1.0/process-groups/hey") self.assert_user_has_permission(user, "read", "/v1.0/process-groups/hey:yo") - self.assert_user_has_permission( - user, "create", "/v1.0/process-groups/hey:yo", expected_result=False - ) + self.assert_user_has_permission(user, "create", "/v1.0/process-groups/hey:yo", expected_result=False) self.assert_user_has_permission(admin_user, "create", "/anything-they-want") - self.assert_user_has_permission( - user_two, "read", "/v1.0/process-groups/hey", expected_result=False - ) + self.assert_user_has_permission(user_two, "read", "/v1.0/process-groups/hey", expected_result=False) assert GroupModel.query.filter_by(identifier="group_three").first() is not None self.assert_user_has_permission(user_two, "read", "/v1.0/process-groups/hey2") - self.assert_user_has_permission( - user_two, "read", "/v1.0/process-groups/hey2:yo" - ) - self.assert_user_has_permission( - user_two, "create", "/v1.0/process-groups/hey2:yo" - ) + self.assert_user_has_permission(user_two, "read", "/v1.0/process-groups/hey2:yo") + self.assert_user_has_permission(user_two, "create", "/v1.0/process-groups/hey2:yo") diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_dot_notation.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_dot_notation.py index c646a754..ca05db9c 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_dot_notation.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_dot_notation.py @@ -39,9 +39,7 @@ class TestDotNotation(BaseTest): client, process_model_identifier, headers ) process_instance_id = response.json["id"] - process_instance = ProcessInstanceService().get_process_instance( - process_instance_id - ) + process_instance = ProcessInstanceService().get_process_instance(process_instance_id) processor = ProcessInstanceProcessor(process_instance) processor.do_engine_steps(save=True) @@ -55,9 +53,7 @@ class TestDotNotation(BaseTest): "invoice.invoiceAmount": "1000.00", "invoice.dueDate": "09/30/2022", } - ProcessInstanceService.complete_form_task( - processor, user_task, form_data, with_super_admin_user, human_task - ) + ProcessInstanceService.complete_form_task(processor, user_task, form_data, with_super_admin_user, human_task) expected = { "contibutorName": "Elizabeth", diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling_service.py 
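# Editorial sketch: the dot-notation test feeds form keys like
# "invoice.invoiceAmount" through complete_form_task and expects nested
# dictionaries on the other side. A standalone version of that conversion,
# assuming the simple split-on-dot semantics the test data implies; the
# backend's actual converter may handle more cases.
def dot_notation_to_dict(flat):
    nested = {}
    for key, value in flat.items():
        target = nested
        *parents, leaf = key.split(".")
        for part in parents:
            # Create intermediate dicts as needed: "invoice.dueDate" nests
            # the value under {"invoice": {"dueDate": ...}}.
            target = target.setdefault(part, {})
        target[leaf] = value
    return nested


assert dot_notation_to_dict(
    {"invoice.invoiceAmount": "1000.00", "invoice.dueDate": "09/30/2022"}
) == {"invoice": {"invoiceAmount": "1000.00", "dueDate": "09/30/2022"}}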
b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling_service.py index 44060449..d41ae3e9 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling_service.py @@ -54,17 +54,13 @@ class TestErrorHandlingService(BaseTest): ) # Process instance should be marked as errored by default. - process_instance = self.run_process_model_and_handle_error( - process_model, with_super_admin_user - ) + process_instance = self.run_process_model_and_handle_error(process_model, with_super_admin_user) assert ProcessInstanceStatus.error.value == process_instance.status # If process model should be suspended on error, then that is what should happen. process_model.fault_or_suspend_on_exception = "suspend" ProcessModelService.save_process_model(process_model) - process_instance = self.run_process_model_and_handle_error( - process_model, with_super_admin_user - ) + process_instance = self.run_process_model_and_handle_error(process_model, with_super_admin_user) assert ProcessInstanceStatus.suspended.value == process_instance.status def test_error_sends_bpmn_message( @@ -86,14 +82,10 @@ class TestErrorHandlingService(BaseTest): process_model_source_directory="error", bpmn_file_name="error_handler.bpmn", # Slightly misnamed, it sends and receives ) - process_model.exception_notification_addresses = [ - "dan@ILoveToReadErrorsInMyEmails.com" - ] + process_model.exception_notification_addresses = ["dan@ILoveToReadErrorsInMyEmails.com"] ProcessModelService.save_process_model(process_model) # kick off the process and assure it got marked as an error. - process_instance = self.run_process_model_and_handle_error( - process_model, with_super_admin_user - ) + process_instance = self.run_process_model_and_handle_error(process_model, with_super_admin_user) assert ProcessInstanceStatus.error.value == process_instance.status # Both send and receive messages should be generated, matched diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_message_instance.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_message_instance.py index b48bc239..2c07222e 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_message_instance.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_message_instance.py @@ -38,16 +38,10 @@ class TestMessageInstance(BaseTest): ) -> None: """Test_can_create_message_instance.""" message_name = "Message Model One" - process_model_identifier = self.setup_message_tests( - client, with_super_admin_user - ) + process_model_identifier = self.setup_message_tests(client, with_super_admin_user) - process_model = ProcessModelService.get_process_model( - process_model_id=process_model_identifier - ) - process_instance = self.create_process_instance_from_process_model( - process_model, "waiting" - ) + process_model = ProcessModelService.get_process_model(process_model_id=process_model_identifier) + process_instance = self.create_process_instance_from_process_model(process_model, "waiting") queued_message = MessageInstanceModel( process_instance_id=process_instance.id, @@ -62,9 +56,7 @@ class TestMessageInstance(BaseTest): assert queued_message.status == "ready" assert queued_message.failure_cause is None - queued_message_from_query = MessageInstanceModel.query.filter_by( # type: ignore - id=queued_message.id - ).first() + queued_message_from_query = MessageInstanceModel.query.filter_by(id=queued_message.id).first() 
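# Editorial sketch: run_process_model_and_handle_error drives the two outcomes
# asserted in this file: instances end up "error" by default and "suspended"
# when the model sets fault_or_suspend_on_exception. A sketch of the branch
# those assertions pin down; save() stands in for whatever persistence the
# real handler uses, and everything beyond the field names and status values
# shown in the tests is an assumption.
def handle_process_exception(process_instance, process_model, save):
    if process_model.fault_or_suspend_on_exception == "suspend":
        process_instance.status = "suspended"
    else:
        process_instance.status = "error"  # the default the first assertion checks
    save(process_instance)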
# type: ignore assert queued_message_from_query is not None def test_cannot_set_invalid_status( @@ -76,15 +68,9 @@ class TestMessageInstance(BaseTest): ) -> None: """Test_cannot_set_invalid_status.""" message_name = "message_model_one" - process_model_identifier = self.setup_message_tests( - client, with_super_admin_user - ) - process_model = ProcessModelService.get_process_model( - process_model_id=process_model_identifier - ) - process_instance = self.create_process_instance_from_process_model( - process_model, "waiting" - ) + process_model_identifier = self.setup_message_tests(client, with_super_admin_user) + process_model = ProcessModelService.get_process_model(process_model_id=process_model_identifier) + process_instance = self.create_process_instance_from_process_model(process_model, "waiting") with pytest.raises(ValueError) as exception: MessageInstanceModel( @@ -94,9 +80,7 @@ class TestMessageInstance(BaseTest): name=message_name, status="BAD_STATUS", ) - assert ( - str(exception.value) == "MessageInstanceModel: invalid status: BAD_STATUS" - ) + assert str(exception.value) == "MessageInstanceModel: invalid status: BAD_STATUS" queued_message = MessageInstanceModel( process_instance_id=process_instance.id, @@ -109,9 +93,7 @@ class TestMessageInstance(BaseTest): with pytest.raises(ValueError) as exception: queued_message.status = "BAD_STATUS" - assert ( - str(exception.value) == "MessageInstanceModel: invalid status: BAD_STATUS" - ) + assert str(exception.value) == "MessageInstanceModel: invalid status: BAD_STATUS" def test_cannot_set_invalid_message_type( self, @@ -122,16 +104,10 @@ class TestMessageInstance(BaseTest): ) -> None: """Test_cannot_set_invalid_message_type.""" message_name = "message_model_one" - process_model_identifier = self.setup_message_tests( - client, with_super_admin_user - ) + process_model_identifier = self.setup_message_tests(client, with_super_admin_user) - process_model = ProcessModelService.get_process_model( - process_model_id=process_model_identifier - ) - process_instance = self.create_process_instance_from_process_model( - process_model, "waiting" - ) + process_model = ProcessModelService.get_process_model(process_model_id=process_model_identifier) + process_instance = self.create_process_instance_from_process_model(process_model, "waiting") with pytest.raises(ValueError) as exception: MessageInstanceModel( @@ -140,10 +116,7 @@ class TestMessageInstance(BaseTest): message_type="BAD_MESSAGE_TYPE", name=message_name, ) - assert ( - str(exception.value) - == "MessageInstanceModel: invalid message_type: BAD_MESSAGE_TYPE" - ) + assert str(exception.value) == "MessageInstanceModel: invalid message_type: BAD_MESSAGE_TYPE" queued_message = MessageInstanceModel( process_instance_id=process_instance.id, @@ -156,10 +129,7 @@ class TestMessageInstance(BaseTest): with pytest.raises(ValueError) as exception: queued_message.message_type = "BAD_MESSAGE_TYPE" - assert ( - str(exception.value) - == "MessageInstanceModel: invalid message_type: BAD_MESSAGE_TYPE" - ) + assert str(exception.value) == "MessageInstanceModel: invalid message_type: BAD_MESSAGE_TYPE" def test_force_failure_cause_if_status_is_failure( self, @@ -170,16 +140,10 @@ class TestMessageInstance(BaseTest): ) -> None: """Test_force_failure_cause_if_status_is_failure.""" message_name = "message_model_one" - process_model_identifier = self.setup_message_tests( - client, with_super_admin_user - ) + process_model_identifier = self.setup_message_tests(client, with_super_admin_user) - process_model = 
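# Editorial sketch: the pytest.raises blocks below require that both
# construction and later attribute assignment raise ValueError. One common way
# to get that behavior is SQLAlchemy's @validates hook; whether
# MessageInstanceModel uses exactly this mechanism, and the full set of valid
# statuses, are assumptions. The error text comes from the test.
from sqlalchemy.orm import validates

VALID_STATUSES = ["ready", "completed", "failed"]  # assumed; these values appear in the tests


class StatusGuardSketch:
    @validates("status")
    def validate_status(self, key, value):
        # Runs on assignment once the class is mapped by the ORM.
        if value not in VALID_STATUSES:
            raise ValueError(f"MessageInstanceModel: invalid status: {value}")
        return value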
ProcessModelService.get_process_model( - process_model_id=process_model_identifier - ) - process_instance = self.create_process_instance_from_process_model( - process_model, "waiting" - ) + process_model = ProcessModelService.get_process_model(process_model_id=process_model_identifier) + process_instance = self.create_process_instance_from_process_model(process_model, "waiting") queued_message = MessageInstanceModel( process_instance_id=process_instance.id, @@ -191,10 +155,7 @@ class TestMessageInstance(BaseTest): db.session.add(queued_message) with pytest.raises(ValueError) as exception: db.session.commit() - assert ( - str(exception.value) - == "MessageInstanceModel: failure_cause must be set if status is failed" - ) + assert str(exception.value) == "MessageInstanceModel: failure_cause must be set if status is failed" assert queued_message.id is None db.session.remove() # type: ignore diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_message_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_message_service.py index c0898a04..2d2f7baa 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_message_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_message_service.py @@ -104,9 +104,7 @@ class TestMessageService(BaseTest): ) # Now start the main process - self.start_sender_process( - client, with_super_admin_user, "test_between_processes" - ) + self.start_sender_process(client, with_super_admin_user, "test_between_processes") self.assure_a_message_was_sent() # This is typically called in a background cron process, so we will manually call it @@ -142,9 +140,7 @@ class TestMessageService(BaseTest): # The message receiver process is also complete message_receiver_process = ( - ProcessInstanceModel.query.filter_by( - process_model_identifier="test_group/message_receive" - ) + ProcessInstanceModel.query.filter_by(process_model_identifier="test_group/message_receive") .order_by(ProcessInstanceModel.id) .first() ) @@ -157,9 +153,7 @@ class TestMessageService(BaseTest): group_name: str = "test_group", ) -> None: process_group_id = group_name - self.create_process_group( - client, with_super_admin_user, process_group_id, process_group_id - ) + self.create_process_group(client, with_super_admin_user, process_group_id, process_group_id) process_model = load_test_spec( "test_group/message", @@ -195,9 +189,7 @@ class TestMessageService(BaseTest): ) assert len(send_messages) == 1 send_message = send_messages[0] - assert ( - send_message.payload == self.payload - ), "The send message should match up with the payload" + assert send_message.payload == self.payload, "The send message should match up with the payload" assert send_message.name == "Request Approval" assert send_message.status == "ready" @@ -214,14 +206,10 @@ class TestMessageService(BaseTest): waiting_message = waiting_messages[0] self.assure_correlation_properties_are_right(waiting_message) - def assure_correlation_properties_are_right( - self, message: MessageInstanceModel - ) -> None: + def assure_correlation_properties_are_right(self, message: MessageInstanceModel) -> None: # Correlation Properties should match up po_curr = next(c for c in message.correlation_rules if c.name == "po_number") - customer_curr = next( - c for c in message.correlation_rules if c.name == "customer_id" - ) + customer_curr = next(c for c in message.correlation_rules if c.name == "customer_id") assert po_curr is not None assert customer_curr is not None @@ -234,9 +222,7 @@ class 
TestMessageService(BaseTest): ) -> None: """Test_can_send_message_to_multiple_process_models.""" process_group_id = "test_group_multi" - self.create_process_group( - client, with_super_admin_user, process_group_id, process_group_id - ) + self.create_process_group(client, with_super_admin_user, process_group_id, process_group_id) process_model_sender = load_test_spec( "test_group/message_sender", @@ -267,18 +253,9 @@ class TestMessageService(BaseTest): # At this point, the message_sender process has fired two different messages but those # processes have not started, and it is now paused, waiting for to receive a message. so # we should have two sends and a receive. - assert ( - MessageInstanceModel.query.filter_by( - process_instance_id=process_instance_sender.id - ).count() - == 3 - ) - assert ( - MessageInstanceModel.query.count() == 3 - ) # all messages are related to the instance - orig_send_messages = MessageInstanceModel.query.filter_by( - message_type="send" - ).all() + assert MessageInstanceModel.query.filter_by(process_instance_id=process_instance_sender.id).count() == 3 + assert MessageInstanceModel.query.count() == 3 # all messages are related to the instance + orig_send_messages = MessageInstanceModel.query.filter_by(message_type="send").all() assert len(orig_send_messages) == 2 assert MessageInstanceModel.query.filter_by(message_type="receive").count() == 1 @@ -292,52 +269,36 @@ class TestMessageService(BaseTest): process_instance_result = ProcessInstanceModel.query.all() assert len(process_instance_result) == 3 process_instance_receiver_one = ( - ProcessInstanceModel.query.filter_by( - process_model_identifier="test_group/message_receiver_one" - ) + ProcessInstanceModel.query.filter_by(process_model_identifier="test_group/message_receiver_one") .order_by(ProcessInstanceModel.id) .first() ) assert process_instance_receiver_one is not None process_instance_receiver_two = ( - ProcessInstanceModel.query.filter_by( - process_model_identifier="test_group/message_receiver_two" - ) + ProcessInstanceModel.query.filter_by(process_model_identifier="test_group/message_receiver_two") .order_by(ProcessInstanceModel.id) .first() ) assert process_instance_receiver_two is not None # just make sure it's a different process instance - assert ( - process_instance_receiver_one.process_model_identifier - == "test_group/message_receiver_one" - ) + assert process_instance_receiver_one.process_model_identifier == "test_group/message_receiver_one" assert process_instance_receiver_one.id != process_instance_sender.id assert process_instance_receiver_one.status == "complete" - assert ( - process_instance_receiver_two.process_model_identifier - == "test_group/message_receiver_two" - ) + assert process_instance_receiver_two.process_model_identifier == "test_group/message_receiver_two" assert process_instance_receiver_two.id != process_instance_sender.id assert process_instance_receiver_two.status == "complete" message_instance_result = ( - MessageInstanceModel.query.order_by(MessageInstanceModel.id) - .order_by(MessageInstanceModel.id) - .all() + MessageInstanceModel.query.order_by(MessageInstanceModel.id).order_by(MessageInstanceModel.id).all() ) assert len(message_instance_result) == 7 message_instance_receiver_one = [ - x - for x in message_instance_result - if x.process_instance_id == process_instance_receiver_one.id + x for x in message_instance_result if x.process_instance_id == process_instance_receiver_one.id ][0] message_instance_receiver_two = [ - x - for x in message_instance_result - if 
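# Editorial sketch: correlate_all_message_instances pairs outstanding "send"
# instances with "receive" instances whose name and correlation properties
# (e.g. po_number and customer_id above) agree, marking both "completed".
# A rough sketch of one matching pass; the "ready" status for unmatched
# instances and the correlates() predicate are assumptions, and the real
# service also starts new process instances for message start events.
def correlate_once(message_instances, correlates):
    receives = [m for m in message_instances if m.message_type == "receive" and m.status == "ready"]
    for send in (m for m in message_instances if m.message_type == "send" and m.status == "ready"):
        for receive in receives:
            if receive.status == "ready" and send.name == receive.name and correlates(send, receive):
                # Match found: complete both sides and move to the next send.
                send.status = receive.status = "completed"
                break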
x.process_instance_id == process_instance_receiver_two.id + x for x in message_instance_result if x.process_instance_id == process_instance_receiver_two.id ][0] assert message_instance_receiver_one is not None assert message_instance_receiver_two is not None @@ -349,17 +310,13 @@ class TestMessageService(BaseTest): MessageService.correlate_all_message_instances() message_instance_result = ( - MessageInstanceModel.query.order_by(MessageInstanceModel.id) - .order_by(MessageInstanceModel.id) - .all() + MessageInstanceModel.query.order_by(MessageInstanceModel.id).order_by(MessageInstanceModel.id).all() ) assert len(message_instance_result) == 8 for message_instance in message_instance_result: assert message_instance.status == "completed" - process_instance_result = ProcessInstanceModel.query.order_by( - ProcessInstanceModel.id - ).all() + process_instance_result = ProcessInstanceModel.query.order_by(ProcessInstanceModel.id).all() assert len(process_instance_result) == 3 for process_instance in process_instance_result: assert process_instance.status == "complete" diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_permission_target.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_permission_target.py index 6ae126dd..9433cbbe 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_permission_target.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_permission_target.py @@ -13,9 +13,7 @@ from spiffworkflow_backend.models.permission_target import PermissionTargetModel class TestPermissionTarget(BaseTest): """TestPermissionTarget.""" - def test_wildcard_must_go_at_the_end_of_uri( - self, app: Flask, with_db_and_bpmn_file_cleanup: None - ) -> None: + def test_wildcard_must_go_at_the_end_of_uri(self, app: Flask, with_db_and_bpmn_file_cleanup: None) -> None: """Test_wildcard_must_go_at_the_end_of_uri.""" permission_target = PermissionTargetModel(uri="/test_group/%") db.session.add(permission_target) @@ -27,13 +25,9 @@ class TestPermissionTarget(BaseTest): with pytest.raises(InvalidPermissionTargetUriError) as exception: PermissionTargetModel(uri="/test_group/%/model") - assert ( - str(exception.value) == "Wildcard must appear at end: /test_group/%/model" - ) + assert str(exception.value) == "Wildcard must appear at end: /test_group/%/model" - def test_can_change_asterisk_to_percent_on_creation( - self, app: Flask, with_db_and_bpmn_file_cleanup: None - ) -> None: + def test_can_change_asterisk_to_percent_on_creation(self, app: Flask, with_db_and_bpmn_file_cleanup: None) -> None: """Test_can_change_asterisk_to_percent_on_creation.""" permission_target = PermissionTargetModel(uri="/test_group/*") db.session.add(permission_target) diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_permissions.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_permissions.py index f45f4ef5..b81164c1 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_permissions.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_permissions.py @@ -33,9 +33,7 @@ class TestPermissions(BaseTest): ) -> None: """Test_user_can_be_given_permission_to_administer_process_group.""" process_group_id = "group-a" - self.create_process_group( - client, with_super_admin_user, process_group_id, process_group_id - ) + self.create_process_group(client, with_super_admin_user, process_group_id, process_group_id) load_test_spec( "group-a/timers_intermediate_catch_event", 
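# Editorial sketch: the permission-target tests above pin down a target-URI
# normalization rule: a trailing "*" is stored as the SQL wildcard "%", and a
# "%" anywhere but the end is rejected. The exception name is the one the test
# imports; the helper itself is illustrative.
class InvalidPermissionTargetUriError(Exception):
    pass


def normalize_target_uri(uri):
    if uri.endswith("/*"):
        uri = uri[:-1] + "%"  # "/test_group/*" is stored as "/test_group/%"
    if "%" in uri and not uri.endswith("%"):
        raise InvalidPermissionTargetUriError(f"Wildcard must appear at end: {uri}")
    return uri


assert normalize_target_uri("/test_group/*") == "/test_group/%"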
bpmn_file_name="timers_intermediate_catch_event.bpmn", @@ -85,16 +83,10 @@ class TestPermissions(BaseTest): db.session.add(permission_assignment) db.session.commit() - self.assert_user_has_permission( - group_a_admin, "update", f"/{process_group_a_id}" - ) - self.assert_user_has_permission( - group_a_admin, "update", f"/{process_group_b_id}", expected_result=False - ) + self.assert_user_has_permission(group_a_admin, "update", f"/{process_group_a_id}") + self.assert_user_has_permission(group_a_admin, "update", f"/{process_group_b_id}", expected_result=False) - def test_user_can_be_granted_access_through_a_group( - self, app: Flask, with_db_and_bpmn_file_cleanup: None - ) -> None: + def test_user_can_be_granted_access_through_a_group(self, app: Flask, with_db_and_bpmn_file_cleanup: None) -> None: """Test_user_can_be_granted_access_through_a_group.""" process_group_ids = ["group-a", "group-b"] process_group_a_id = process_group_ids[0] @@ -158,12 +150,8 @@ class TestPermissions(BaseTest): db.session.add(permission_assignment) db.session.commit() - self.assert_user_has_permission( - group_a_admin, "update", f"/{process_group_a_id}" - ) - self.assert_user_has_permission( - group_a_admin, "update", f"/{process_group_b_id}" - ) + self.assert_user_has_permission(group_a_admin, "update", f"/{process_group_a_id}") + self.assert_user_has_permission(group_a_admin, "update", f"/{process_group_b_id}") def test_user_can_access_base_path_when_given_wildcard_permission( self, app: Flask, with_db_and_bpmn_file_cleanup: None @@ -187,6 +175,4 @@ class TestPermissions(BaseTest): self.assert_user_has_permission(group_a_admin, "update", "/process-models/hey") self.assert_user_has_permission(group_a_admin, "update", "/process-models/") self.assert_user_has_permission(group_a_admin, "update", "/process-models") - self.assert_user_has_permission( - group_a_admin, "update", "/process-modelshey", expected_result=False - ) + self.assert_user_has_permission(group_a_admin, "update", "/process-modelshey", expected_result=False) diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_group.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_group.py index 5cf8945f..09001f78 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_group.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_group.py @@ -5,9 +5,7 @@ from spiffworkflow_backend.models.process_group import ProcessGroup from spiffworkflow_backend.services.process_model_service import ProcessModelService -def test_there_is_at_least_one_group_after_we_create_one( - app: Flask, with_db_and_bpmn_file_cleanup: None -) -> None: +def test_there_is_at_least_one_group_after_we_create_one(app: Flask, with_db_and_bpmn_file_cleanup: None) -> None: """Test_there_is_at_least_one_group_after_we_create_one.""" process_group = ProcessGroup(id="hey", display_name="sure") ProcessModelService.add_process_group(process_group) diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index 827a3b3d..a5faf545 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -58,11 +58,7 @@ class TestProcessInstanceProcessor(BaseTest): app.config["THREAD_LOCAL_DATA"].process_instance_id = 0 script_engine = 
ProcessInstanceProcessor._script_engine result = script_engine._evaluate("fact_service(type='norris')", {}) - assert ( - result - == "Chuck Norris doesn’t read books. He stares them down until he gets the" - " information he wants." - ) + assert result == "Chuck Norris doesn’t read books. He stares them down until he gets the information he wants." app.config["THREAD_LOCAL_DATA"].process_model_identifier = None app.config["THREAD_LOCAL_DATA"].process_instance_id = None @@ -74,9 +70,7 @@ class TestProcessInstanceProcessor(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_sets_permission_correctly_on_human_task.""" - self.create_process_group( - client, with_super_admin_user, "test_group", "test_group" - ) + self.create_process_group(client, with_super_admin_user, "test_group", "test_group") initiator_user = self.find_or_create_user("initiator_user") finance_user = self.find_or_create_user("testuser2") assert initiator_user.principal is not None @@ -107,12 +101,8 @@ class TestProcessInstanceProcessor(BaseTest): human_task.task_name, processor.bpmn_process_instance ) with pytest.raises(UserDoesNotHaveAccessToTaskError): - ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, finance_user, human_task - ) - ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, initiator_user, human_task - ) + ProcessInstanceService.complete_form_task(processor, spiff_task, {}, finance_user, human_task) + ProcessInstanceService.complete_form_task(processor, spiff_task, {}, initiator_user, human_task) assert len(process_instance.active_human_tasks) == 1 human_task = process_instance.active_human_tasks[0] @@ -124,13 +114,9 @@ class TestProcessInstanceProcessor(BaseTest): human_task.task_name, processor.bpmn_process_instance ) with pytest.raises(UserDoesNotHaveAccessToTaskError): - ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, initiator_user, human_task - ) + ProcessInstanceService.complete_form_task(processor, spiff_task, {}, initiator_user, human_task) - ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, finance_user, human_task - ) + ProcessInstanceService.complete_form_task(processor, spiff_task, {}, finance_user, human_task) assert len(process_instance.active_human_tasks) == 1 human_task = process_instance.active_human_tasks[0] assert human_task.lane_assignment_id is None @@ -140,9 +126,7 @@ class TestProcessInstanceProcessor(BaseTest): spiff_task = processor.__class__.get_task_by_bpmn_identifier( human_task.task_name, processor.bpmn_process_instance ) - ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, initiator_user, human_task - ) + ProcessInstanceService.complete_form_task(processor, spiff_task, {}, initiator_user, human_task) assert process_instance.status == ProcessInstanceStatus.complete.value @@ -154,9 +138,7 @@ class TestProcessInstanceProcessor(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_sets_permission_correctly_on_human_task_when_using_dict.""" - self.create_process_group( - client, with_super_admin_user, "test_group", "test_group" - ) + self.create_process_group(client, with_super_admin_user, "test_group", "test_group") initiator_user = self.find_or_create_user("initiator_user") finance_user_three = self.find_or_create_user("testuser3") finance_user_four = self.find_or_create_user("testuser4") @@ -189,12 +171,8 @@ class TestProcessInstanceProcessor(BaseTest): human_task.task_name, processor.bpmn_process_instance ) with 
pytest.raises(UserDoesNotHaveAccessToTaskError): - ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, finance_user_three, human_task - ) - ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, initiator_user, human_task - ) + ProcessInstanceService.complete_form_task(processor, spiff_task, {}, finance_user_three, human_task) + ProcessInstanceService.complete_form_task(processor, spiff_task, {}, initiator_user, human_task) assert human_task.completed_by_user_id == initiator_user.id assert len(process_instance.active_human_tasks) == 1 @@ -207,14 +185,10 @@ class TestProcessInstanceProcessor(BaseTest): human_task.task_name, processor.bpmn_process_instance ) with pytest.raises(UserDoesNotHaveAccessToTaskError): - ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, initiator_user, human_task - ) + ProcessInstanceService.complete_form_task(processor, spiff_task, {}, initiator_user, human_task) g.user = finance_user_three - ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, finance_user_three, human_task - ) + ProcessInstanceService.complete_form_task(processor, spiff_task, {}, finance_user_three, human_task) assert human_task.completed_by_user_id == finance_user_three.id assert len(process_instance.active_human_tasks) == 1 human_task = process_instance.active_human_tasks[0] @@ -226,13 +200,9 @@ class TestProcessInstanceProcessor(BaseTest): human_task.task_name, processor.bpmn_process_instance ) with pytest.raises(UserDoesNotHaveAccessToTaskError): - ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, initiator_user, human_task - ) + ProcessInstanceService.complete_form_task(processor, spiff_task, {}, initiator_user, human_task) - ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, finance_user_four, human_task - ) + ProcessInstanceService.complete_form_task(processor, spiff_task, {}, finance_user_four, human_task) assert human_task.completed_by_user_id == finance_user_four.id assert len(process_instance.active_human_tasks) == 1 human_task = process_instance.active_human_tasks[0] @@ -243,9 +213,7 @@ class TestProcessInstanceProcessor(BaseTest): spiff_task = processor.__class__.get_task_by_bpmn_identifier( human_task.task_name, processor.bpmn_process_instance ) - ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, initiator_user, human_task - ) + ProcessInstanceService.complete_form_task(processor, spiff_task, {}, initiator_user, human_task) assert len(process_instance.active_human_tasks) == 1 human_task = process_instance.active_human_tasks[0] @@ -253,12 +221,8 @@ class TestProcessInstanceProcessor(BaseTest): human_task.task_name, processor.bpmn_process_instance ) with pytest.raises(UserDoesNotHaveAccessToTaskError): - ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, initiator_user, human_task - ) - ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, testadmin1, human_task - ) + ProcessInstanceService.complete_form_task(processor, spiff_task, {}, initiator_user, human_task) + ProcessInstanceService.complete_form_task(processor, spiff_task, {}, testadmin1, human_task) assert process_instance.status == ProcessInstanceStatus.complete.value @@ -286,9 +250,7 @@ class TestProcessInstanceProcessor(BaseTest): processor = ProcessInstanceProcessor(process_instance) # this task will be found within subprocesses - spiff_task = processor.__class__.get_task_by_bpmn_identifier( - "do_nothing", processor.bpmn_process_instance - ) + 
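# Editorial sketch: the pytest.raises blocks in these lane tests exercise a
# guard where only a user currently assigned to the human task may complete
# it. The exception name is the one these tests use; the assigned-users
# relationship name is an assumption for illustration only.
class UserDoesNotHaveAccessToTaskError(Exception):
    pass


def ensure_user_can_complete(human_task, user):
    allowed_ids = {u.id for u in human_task.human_task_users}  # assumed relationship
    if user.id not in allowed_ids:
        raise UserDoesNotHaveAccessToTaskError(f"user {user.username} is not assigned to this task")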
spiff_task = processor.__class__.get_task_by_bpmn_identifier("do_nothing", processor.bpmn_process_instance) assert spiff_task is not None assert spiff_task.state == TaskState.COMPLETED @@ -300,9 +262,7 @@ class TestProcessInstanceProcessor(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_does_not_recreate_human_tasks_on_multiple_saves.""" - self.create_process_group( - client, with_super_admin_user, "test_group", "test_group" - ) + self.create_process_group(client, with_super_admin_user, "test_group", "test_group") initiator_user = self.find_or_create_user("initiator_user") finance_user_three = self.find_or_create_user("testuser3") assert initiator_user.principal is not None @@ -335,26 +295,16 @@ class TestProcessInstanceProcessor(BaseTest): spiff_manual_task = processor.__class__.get_task_by_bpmn_identifier( human_task_one.task_name, processor.bpmn_process_instance ) - ProcessInstanceService.complete_form_task( - processor, spiff_manual_task, {}, initiator_user, human_task_one - ) + ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) - process_instance = ProcessInstanceModel.query.filter_by( - id=process_instance.id - ).first() + process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() processor = ProcessInstanceProcessor(process_instance) human_task_one = process_instance.active_human_tasks[0] - spiff_manual_task = processor.bpmn_process_instance.get_task( - UUID(human_task_one.task_id) - ) - ProcessInstanceService.complete_form_task( - processor, spiff_manual_task, {}, initiator_user, human_task_one - ) + spiff_manual_task = processor.bpmn_process_instance.get_task(UUID(human_task_one.task_id)) + ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) # recreate variables to ensure all bpmn json was recreated from scratch from the db - process_instance_relookup = ProcessInstanceModel.query.filter_by( - id=process_instance.id - ).first() + process_instance_relookup = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() processor_final = ProcessInstanceProcessor(process_instance_relookup) assert process_instance_relookup.status == "complete" @@ -383,23 +333,16 @@ class TestProcessInstanceProcessor(BaseTest): spiff_tasks_checked_once: list = [] # TODO: also check task data here from the spiff_task directly to ensure we hydrated spiff correctly - def assert_spiff_task_is_in_process( - spiff_task_name: str, bpmn_process_identifier: str - ) -> None: + def assert_spiff_task_is_in_process(spiff_task_name: str, bpmn_process_identifier: str) -> None: if spiff_task.task_spec.name == spiff_task_name: expected_python_env_data = expected_task_data[spiff_task.task_spec.name] if spiff_task.task_spec.name in spiff_tasks_checked_once: - expected_python_env_data = expected_task_data[ - f"{spiff_task.task_spec.name}_second" - ] + expected_python_env_data = expected_task_data[f"{spiff_task.task_spec.name}_second"] task = TaskModel.query.filter_by(guid=str(spiff_task.id)).first() assert task.task_definition_id is not None task_definition = task.task_definition assert task_definition.bpmn_identifier == spiff_task_name - assert ( - task_definition.bpmn_process_definition.bpmn_identifier - == bpmn_process_identifier - ) + assert task_definition.bpmn_process_definition.bpmn_identifier == bpmn_process_identifier assert task.python_env_data() == expected_python_env_data spiff_tasks_checked_once.append(spiff_task.task_spec.name) @@ -407,12 +350,8 
@@ class TestProcessInstanceProcessor(BaseTest): assert len(all_spiff_tasks) > 1 for spiff_task in all_spiff_tasks: assert spiff_task.state == TaskState.COMPLETED - assert_spiff_task_is_in_process( - "test_process_to_call_script", "test_process_to_call" - ) - assert_spiff_task_is_in_process( - "top_level_subprocess_script", "top_level_subprocess" - ) + assert_spiff_task_is_in_process("test_process_to_call_script", "test_process_to_call") + assert_spiff_task_is_in_process("top_level_subprocess_script", "top_level_subprocess") assert_spiff_task_is_in_process("top_level_script", "top_level_process") def test_does_not_recreate_human_tasks_on_multiple_saves( @@ -423,9 +362,7 @@ class TestProcessInstanceProcessor(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_does_not_recreate_human_tasks_on_multiple_saves.""" - self.create_process_group( - client, with_super_admin_user, "test_group", "test_group" - ) + self.create_process_group(client, with_super_admin_user, "test_group", "test_group") initiator_user = self.find_or_create_user("initiator_user") finance_user_three = self.find_or_create_user("testuser3") assert initiator_user.principal is not None @@ -475,9 +412,7 @@ class TestProcessInstanceProcessor(BaseTest): assert process_instance.locked_at_in_seconds is None processor.lock_process_instance("TEST") - process_instance = ProcessInstanceModel.query.filter_by( - id=process_instance.id - ).first() + process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() assert process_instance.locked_by is not None assert process_instance.locked_at_in_seconds is not None @@ -489,9 +424,7 @@ class TestProcessInstanceProcessor(BaseTest): processor.unlock_process_instance("TEST") - process_instance = ProcessInstanceModel.query.filter_by( - id=process_instance.id - ).first() + process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() assert process_instance.locked_by is None assert process_instance.locked_at_in_seconds is None @@ -520,9 +453,7 @@ class TestProcessInstanceProcessor(BaseTest): spiff_task = processor.__class__.get_task_by_bpmn_identifier( human_task_one.task_name, processor.bpmn_process_instance ) - ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, initiator_user, human_task_one - ) + ProcessInstanceService.complete_form_task(processor, spiff_task, {}, initiator_user, human_task_one) assert len(process_instance.active_human_tasks) == 1 assert len(process_instance.human_tasks) == 2 diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_report_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_report_service.py index a0e694dc..3542e7dd 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_report_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_report_service.py @@ -99,9 +99,7 @@ class TestProcessInstanceReportFilter(BaseTest): class TestProcessInstanceReportService(BaseTest): """TestProcessInstanceReportService.""" - def _filter_from_metadata( - self, report_metadata: dict - ) -> ProcessInstanceReportFilter: + def _filter_from_metadata(self, report_metadata: dict) -> ProcessInstanceReportFilter: """Docstring.""" report = ProcessInstanceReportModel( identifier="test", @@ -313,9 +311,7 @@ class TestProcessInstanceReportService(BaseTest): report_filter = self._filter_from_metadata( { "columns": [], - "filter_by": [ - {"field_name": 
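# Editorial sketch: the locking test above asserts that locking stamps
# locked_by and locked_at_in_seconds and that unlocking clears them. A sketch
# of that contract; the exception name is hypothetical, and only the two
# column names appear in this diff.
import time


class ProcessInstanceIsLockedError(Exception):  # hypothetical name
    pass


def lock_process_instance(process_instance, lock_prefix):
    if process_instance.locked_by is not None:
        raise ProcessInstanceIsLockedError(process_instance.locked_by)
    process_instance.locked_by = lock_prefix  # e.g. "TEST" in the test above
    process_instance.locked_at_in_seconds = round(time.time())


def unlock_process_instance(process_instance):
    process_instance.locked_by = None
    process_instance.locked_at_in_seconds = None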
"process_model_identifier", "field_value": "bob"} - ], + "filter_by": [{"field_name": "process_model_identifier", "field_value": "bob"}], } ) @@ -543,9 +539,7 @@ class TestProcessInstanceReportService(BaseTest): report_filter = self._filter_from_metadata_with_overrides( { "columns": [], - "filter_by": [ - {"field_name": "process_model_identifier", "field_value": "bob"} - ], + "filter_by": [{"field_name": "process_model_identifier", "field_value": "bob"}], }, process_model_identifier="joe", ) @@ -660,9 +654,7 @@ class TestProcessInstanceReportService(BaseTest): report_filter = self._filter_from_metadata_with_overrides( { "columns": [], - "filter_by": [ - {"field_name": "process_status", "field_value": "joe,bob"} - ], + "filter_by": [{"field_name": "process_status", "field_value": "joe,bob"}], }, process_status="sue", ) @@ -766,31 +758,19 @@ class TestProcessInstanceReportService(BaseTest): user_two = self.find_or_create_user(username="user_two") # Several processes to ensure they do not return in the result - self.create_process_instance_from_process_model( - process_model=process_model, status="complete", user=user_one - ) - self.create_process_instance_from_process_model( - process_model=process_model, status="complete", user=user_one - ) - self.create_process_instance_from_process_model( - process_model=process_model, status="waiting", user=user_one - ) - self.create_process_instance_from_process_model( - process_model=process_model, status="complete", user=user_two - ) - self.create_process_instance_from_process_model( - process_model=process_model, status="complete", user=user_two - ) + self.create_process_instance_from_process_model(process_model=process_model, status="complete", user=user_one) + self.create_process_instance_from_process_model(process_model=process_model, status="complete", user=user_one) + self.create_process_instance_from_process_model(process_model=process_model, status="waiting", user=user_one) + self.create_process_instance_from_process_model(process_model=process_model, status="complete", user=user_two) + self.create_process_instance_from_process_model(process_model=process_model, status="complete", user=user_two) process_instance_report = ProcessInstanceReportService.report_with_identifier( user=user_one, report_identifier="system_report_completed_instances_initiated_by_me", ) - report_filter = ( - ProcessInstanceReportService.filter_from_metadata_with_overrides( - process_instance_report=process_instance_report, - process_model_identifier=process_model.id, - ) + report_filter = ProcessInstanceReportService.filter_from_metadata_with_overrides( + process_instance_report=process_instance_report, + process_model_identifier=process_model.id, ) response_json = ProcessInstanceReportService.run_process_instance_report( report_filter=report_filter, @@ -821,30 +801,18 @@ class TestProcessInstanceReportService(BaseTest): user_two = self.find_or_create_user(username="user_two") # Several processes to ensure they do not return in the result - process_instance_created_by_user_one_one = ( - self.create_process_instance_from_process_model( - process_model=process_model, status="complete", user=user_one - ) - ) - self.create_process_instance_from_process_model( + process_instance_created_by_user_one_one = self.create_process_instance_from_process_model( process_model=process_model, status="complete", user=user_one ) - process_instance_created_by_user_one_three = ( - self.create_process_instance_from_process_model( - process_model=process_model, status="waiting", user=user_one 
- ) + self.create_process_instance_from_process_model(process_model=process_model, status="complete", user=user_one) + process_instance_created_by_user_one_three = self.create_process_instance_from_process_model( + process_model=process_model, status="waiting", user=user_one ) - process_instance_created_by_user_two_one = ( - self.create_process_instance_from_process_model( - process_model=process_model, status="complete", user=user_two - ) - ) - self.create_process_instance_from_process_model( + process_instance_created_by_user_two_one = self.create_process_instance_from_process_model( process_model=process_model, status="complete", user=user_two ) - self.create_process_instance_from_process_model( - process_model=process_model, status="waiting", user=user_two - ) + self.create_process_instance_from_process_model(process_model=process_model, status="complete", user=user_two) + self.create_process_instance_from_process_model(process_model=process_model, status="waiting", user=user_two) human_task_for_user_one_one = HumanTaskModel( process_instance_id=process_instance_created_by_user_one_one.id, @@ -880,15 +848,11 @@ class TestProcessInstanceReportService(BaseTest): process_instance_report = ProcessInstanceReportService.report_with_identifier( user=user_one, - report_identifier=( - "system_report_completed_instances_with_tasks_completed_by_me" - ), + report_identifier="system_report_completed_instances_with_tasks_completed_by_me", ) - report_filter = ( - ProcessInstanceReportService.filter_from_metadata_with_overrides( - process_instance_report=process_instance_report, - process_model_identifier=process_model.id, - ) + report_filter = ProcessInstanceReportService.filter_from_metadata_with_overrides( + process_instance_report=process_instance_report, + process_model_identifier=process_model.id, ) response_json = ProcessInstanceReportService.run_process_instance_report( report_filter=report_filter, @@ -898,10 +862,7 @@ class TestProcessInstanceReportService(BaseTest): assert len(response_json["results"]) == 1 assert response_json["results"][0]["process_initiator_id"] == user_two.id - assert ( - response_json["results"][0]["id"] - == process_instance_created_by_user_two_one.id - ) + assert response_json["results"][0]["id"] == process_instance_created_by_user_two_one.id assert response_json["results"][0]["status"] == "complete" def test_can_filter_by_completed_instances_with_tasks_completed_by_my_groups( @@ -931,30 +892,18 @@ class TestProcessInstanceReportService(BaseTest): UserService.add_user_to_group(user_three, user_group_two) # Several processes to ensure they do not return in the result - process_instance_created_by_user_one_one = ( - self.create_process_instance_from_process_model( - process_model=process_model, status="complete", user=user_one - ) - ) - self.create_process_instance_from_process_model( + process_instance_created_by_user_one_one = self.create_process_instance_from_process_model( process_model=process_model, status="complete", user=user_one ) - process_instance_created_by_user_one_three = ( - self.create_process_instance_from_process_model( - process_model=process_model, status="waiting", user=user_one - ) + self.create_process_instance_from_process_model(process_model=process_model, status="complete", user=user_one) + process_instance_created_by_user_one_three = self.create_process_instance_from_process_model( + process_model=process_model, status="waiting", user=user_one ) - process_instance_created_by_user_two_one = ( - self.create_process_instance_from_process_model( - 
process_model=process_model, status="complete", user=user_two - ) - ) - self.create_process_instance_from_process_model( + process_instance_created_by_user_two_one = self.create_process_instance_from_process_model( process_model=process_model, status="complete", user=user_two ) - self.create_process_instance_from_process_model( - process_model=process_model, status="waiting", user=user_two - ) + self.create_process_instance_from_process_model(process_model=process_model, status="complete", user=user_two) + self.create_process_instance_from_process_model(process_model=process_model, status="waiting", user=user_two) human_task_for_user_group_one_one = HumanTaskModel( process_instance_id=process_instance_created_by_user_one_one.id, @@ -985,15 +934,11 @@ class TestProcessInstanceReportService(BaseTest): process_instance_report = ProcessInstanceReportService.report_with_identifier( user=user_one, - report_identifier=( - "system_report_completed_instances_with_tasks_completed_by_my_groups" - ), + report_identifier="system_report_completed_instances_with_tasks_completed_by_my_groups", ) - report_filter = ( - ProcessInstanceReportService.filter_from_metadata_with_overrides( - process_instance_report=process_instance_report, - process_model_identifier=process_model.id, - ) + report_filter = ProcessInstanceReportService.filter_from_metadata_with_overrides( + process_instance_report=process_instance_report, + process_model_identifier=process_model.id, ) response_json = ProcessInstanceReportService.run_process_instance_report( report_filter=report_filter, @@ -1003,16 +948,10 @@ class TestProcessInstanceReportService(BaseTest): assert len(response_json["results"]) == 2 assert response_json["results"][0]["process_initiator_id"] == user_two.id - assert ( - response_json["results"][0]["id"] - == process_instance_created_by_user_two_one.id - ) + assert response_json["results"][0]["id"] == process_instance_created_by_user_two_one.id assert response_json["results"][0]["status"] == "complete" assert response_json["results"][1]["process_initiator_id"] == user_one.id - assert ( - response_json["results"][1]["id"] - == process_instance_created_by_user_one_one.id - ) + assert response_json["results"][1]["id"] == process_instance_created_by_user_one_one.id assert response_json["results"][1]["status"] == "complete" def test_can_filter_by_with_relation_to_me( @@ -1042,32 +981,20 @@ class TestProcessInstanceReportService(BaseTest): UserService.add_user_to_group(user_three, user_group_two) # Several processes to ensure they do not return in the result - process_instance_created_by_user_one_one = ( - self.create_process_instance_from_process_model( - process_model=process_model, status="complete", user=user_one - ) + process_instance_created_by_user_one_one = self.create_process_instance_from_process_model( + process_model=process_model, status="complete", user=user_one ) - process_instance_created_by_user_one_two = ( - self.create_process_instance_from_process_model( - process_model=process_model, status="complete", user=user_one - ) + process_instance_created_by_user_one_two = self.create_process_instance_from_process_model( + process_model=process_model, status="complete", user=user_one ) - process_instance_created_by_user_one_three = ( - self.create_process_instance_from_process_model( - process_model=process_model, status="waiting", user=user_one - ) + process_instance_created_by_user_one_three = self.create_process_instance_from_process_model( + process_model=process_model, status="waiting", user=user_one ) - 
process_instance_created_by_user_two_one = ( - self.create_process_instance_from_process_model( - process_model=process_model, status="complete", user=user_two - ) - ) - self.create_process_instance_from_process_model( + process_instance_created_by_user_two_one = self.create_process_instance_from_process_model( process_model=process_model, status="complete", user=user_two ) - self.create_process_instance_from_process_model( - process_model=process_model, status="waiting", user=user_two - ) + self.create_process_instance_from_process_model(process_model=process_model, status="complete", user=user_two) + self.create_process_instance_from_process_model(process_model=process_model, status="waiting", user=user_two) human_task_for_user_group_one_one = HumanTaskModel( process_instance_id=process_instance_created_by_user_one_one.id, @@ -1098,15 +1025,11 @@ class TestProcessInstanceReportService(BaseTest): UserService.add_user_to_human_tasks_if_appropriate(user_one) - process_instance_report = ProcessInstanceReportService.report_with_identifier( - user=user_one - ) - report_filter = ( - ProcessInstanceReportService.filter_from_metadata_with_overrides( - process_instance_report=process_instance_report, - process_model_identifier=process_model.id, - with_relation_to_me=True, - ) + process_instance_report = ProcessInstanceReportService.report_with_identifier(user=user_one) + report_filter = ProcessInstanceReportService.filter_from_metadata_with_overrides( + process_instance_report=process_instance_report, + process_model_identifier=process_model.id, + with_relation_to_me=True, ) response_json = ProcessInstanceReportService.run_process_instance_report( report_filter=report_filter, @@ -1116,19 +1039,7 @@ class TestProcessInstanceReportService(BaseTest): assert len(response_json["results"]) == 4 process_instance_ids_in_results = [r["id"] for r in response_json["results"]] - assert ( - process_instance_created_by_user_one_one.id - in process_instance_ids_in_results - ) - assert ( - process_instance_created_by_user_one_two.id - in process_instance_ids_in_results - ) - assert ( - process_instance_created_by_user_one_three.id - in process_instance_ids_in_results - ) - assert ( - process_instance_created_by_user_two_one.id - in process_instance_ids_in_results - ) + assert process_instance_created_by_user_one_one.id in process_instance_ids_in_results + assert process_instance_created_by_user_one_two.id in process_instance_ids_in_results + assert process_instance_created_by_user_one_three.id in process_instance_ids_in_results + assert process_instance_created_by_user_two_one.id in process_instance_ids_in_results diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_service.py index 94b8658d..83f19a69 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_service.py @@ -37,10 +37,7 @@ class TestProcessInstanceService(BaseTest): assert model.mimetype == "some/mimetype" assert model.filename == "testing.txt" assert model.contents == b"testing\n" # type: ignore - assert ( - model.digest - == "12a61f4e173fb3a11c05d6471f74728f76231b4a5fcd9667cef3af87a3ae4dc2" - ) + assert model.digest == "12a61f4e173fb3a11c05d6471f74728f76231b4a5fcd9667cef3af87a3ae4dc2" def test_can_create_file_data_model_for_file_data_value( self, @@ -215,9 +212,7 @@ class 
TestProcessInstanceService(BaseTest): processor = ProcessInstanceProcessor(process_instance) processor.do_engine_steps(save=True) - process_instance_logs = SpiffLoggingModel.query.filter_by( - process_instance_id=process_instance.id - ).all() + process_instance_logs = SpiffLoggingModel.query.filter_by(process_instance_id=process_instance.id).all() initial_length = len(process_instance_logs) # ensure we have something in the logs @@ -225,7 +220,5 @@ class TestProcessInstanceService(BaseTest): # logs should NOT increase after running this a second time since it's just waiting on a human task processor.do_engine_steps(save=True) - process_instance_logs = SpiffLoggingModel.query.filter_by( - process_instance_id=process_instance.id - ).all() + process_instance_logs = SpiffLoggingModel.query.filter_by(process_instance_id=process_instance.id).all() assert len(process_instance_logs) == initial_length diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_model.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_model.py index a5ac6c96..4d8e1b5b 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_model.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_model.py @@ -22,9 +22,7 @@ class TestProcessModel(BaseTest): def test_initializes_files_as_empty_array(self) -> None: """Test_initializes_files_as_empty_array.""" - process_model_one = self.create_test_process_model( - id="model_one", display_name="Model One" - ) + process_model_one = self.create_test_process_model(id="model_one", display_name="Model One") assert process_model_one.files == [] def test_can_run_process_model_with_call_activities_when_in_same_process_model_directory( @@ -35,18 +33,14 @@ class TestProcessModel(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_can_run_process_model_with_call_activities.""" - self.create_process_group( - client, with_super_admin_user, "test_group", "test_group" - ) + self.create_process_group(client, with_super_admin_user, "test_group", "test_group") process_model = load_test_spec( "test_group/call_activity_test", # bpmn_file_name="call_activity_test.bpmn", process_model_source_directory="call_activity_same_directory", ) - process_instance = self.create_process_instance_from_process_model( - process_model - ) + process_instance = self.create_process_instance_from_process_model(process_model) processor = ProcessInstanceProcessor(process_instance) processor.do_engine_steps(save=True) assert process_instance.status == "complete" @@ -59,9 +53,7 @@ class TestProcessModel(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_can_run_process_model_with_call_activities.""" - self.create_process_group( - client, with_super_admin_user, "test_group", "test_group" - ) + self.create_process_group(client, with_super_admin_user, "test_group", "test_group") process_model = load_test_spec( "test_group/call_activity_nested", process_model_source_directory="call_activity_nested", @@ -79,9 +71,7 @@ class TestProcessModel(BaseTest): process_model_source_directory="call_activity_nested", bpmn_file_name=bpmn_file_name, ) - process_instance = self.create_process_instance_from_process_model( - process_model - ) + process_instance = self.create_process_instance_from_process_model(process_model) processor = ProcessInstanceProcessor(process_instance) processor.do_engine_steps(save=True) assert process_instance.status == "complete" @@ -94,9 +84,7 @@ class TestProcessModel(BaseTest): with_super_admin_user: 
UserModel, ) -> None: """Test_can_run_process_model_with_call_activities.""" - self.create_process_group( - client, with_super_admin_user, "test_group", "test_group" - ) + self.create_process_group(client, with_super_admin_user, "test_group", "test_group") process_model = load_test_spec( "test_group/call_activity_nested", process_model_source_directory="call_activity_nested", @@ -114,9 +102,7 @@ class TestProcessModel(BaseTest): process_model_source_directory="call_activity_nested", bpmn_file_name=bpmn_file_name, ) - process_instance = self.create_process_instance_from_process_model( - process_model - ) + process_instance = self.create_process_instance_from_process_model(process_model) # delete all of the id lookup items to force to processor to find the correct # process model when running the process @@ -134,9 +120,7 @@ class TestProcessModel(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_can_run_process_model_with_call_activities.""" - self.create_process_group( - client, with_super_admin_user, "test_group", "test_group" - ) + self.create_process_group(client, with_super_admin_user, "test_group", "test_group") process_model = load_test_spec( "test_group/hello_world", process_model_source_directory="nested-task-data-structure", @@ -151,25 +135,19 @@ class TestProcessModel(BaseTest): }, ) - process_instance = self.create_process_instance_from_process_model( - process_model - ) + process_instance = self.create_process_instance_from_process_model(process_model) processor = ProcessInstanceProcessor(process_instance) processor.do_engine_steps(save=True) assert process_instance.status == "complete" - process_instance_metadata_awesome_var = ( - ProcessInstanceMetadataModel.query.filter_by( - process_instance_id=process_instance.id, key="awesome_var" - ).first() - ) + process_instance_metadata_awesome_var = ProcessInstanceMetadataModel.query.filter_by( + process_instance_id=process_instance.id, key="awesome_var" + ).first() assert process_instance_metadata_awesome_var is not None assert process_instance_metadata_awesome_var.value == "sweet2" - process_instance_metadata_awesome_var = ( - ProcessInstanceMetadataModel.query.filter_by( - process_instance_id=process_instance.id, key="invoice_number" - ).first() - ) + process_instance_metadata_awesome_var = ProcessInstanceMetadataModel.query.filter_by( + process_instance_id=process_instance.id, key="invoice_number" + ).first() assert process_instance_metadata_awesome_var is not None assert process_instance_metadata_awesome_var.value == "123" diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_model_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_model_service.py index 7392bdfd..79d52888 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_model_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_model_service.py @@ -19,9 +19,7 @@ class TestProcessModelService(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_can_update_specified_attributes.""" - self.create_process_group( - client, with_super_admin_user, "test_group", "test_group" - ) + self.create_process_group(client, with_super_admin_user, "test_group", "test_group") process_model = load_test_spec( "test_group/hello_world", bpmn_file_name="hello_world.bpmn", @@ -32,9 +30,7 @@ class TestProcessModelService(BaseTest): primary_process_id = process_model.primary_process_id assert primary_process_id == "Process_HelloWorld" - 
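# Editorial sketch: the nested-task-data test above ends with
# ProcessInstanceMetadataModel rows keyed "awesome_var" and "invoice_number".
# A sketch of metadata extraction consistent with those assertions: dotted
# paths are resolved against the final task data and stored as string values.
# The shape of the extraction-path configuration is an assumption.
def extract_metadata(task_data, extraction_paths):
    extracted = {}
    for extraction in extraction_paths:
        value = task_data
        for part in extraction["path"].split("."):
            if not isinstance(value, dict) or part not in value:
                value = None
                break
            value = value[part]
        if value is not None:
            extracted[extraction["key"]] = str(value)  # stored values are strings ("123")
    return extracted


assert extract_metadata(
    {"outer": {"inner": "sweet2"}, "invoice_number": 123},
    [{"key": "awesome_var", "path": "outer.inner"}, {"key": "invoice_number", "path": "invoice_number"}],
) == {"awesome_var": "sweet2", "invoice_number": "123"}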
ProcessModelService.update_process_model( - process_model, {"display_name": "new_name"} - ) + ProcessModelService.update_process_model(process_model, {"display_name": "new_name"}) assert process_model.display_name == "new_name" assert process_model.primary_process_id == primary_process_id diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_restricted_script_engine.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_restricted_script_engine.py index 0f5d4770..e0b1535d 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_restricted_script_engine.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_restricted_script_engine.py @@ -23,9 +23,7 @@ class TestOpenFile(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_form_data_conversion_to_dot_dict.""" - self.create_process_group( - client, with_super_admin_user, "test_group", "test_group" - ) + self.create_process_group(client, with_super_admin_user, "test_group", "test_group") process_model = load_test_spec( "test_group/dangerous", bpmn_file_name="read_etc_passwd.bpmn", @@ -33,9 +31,7 @@ class TestOpenFile(BaseTest): ) self.find_or_create_user() - process_instance = self.create_process_instance_from_process_model( - process_model - ) + process_instance = self.create_process_instance_from_process_model(process_model) processor = ProcessInstanceProcessor(process_instance) with pytest.raises(ApiError) as exception: @@ -54,9 +50,7 @@ class TestImportModule(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_form_data_conversion_to_dot_dict.""" - self.create_process_group( - client, with_super_admin_user, "test_group", "test_group" - ) + self.create_process_group(client, with_super_admin_user, "test_group", "test_group") process_model = load_test_spec( "test_group/dangerous", bpmn_file_name="read_env.bpmn", @@ -64,9 +58,7 @@ class TestImportModule(BaseTest): ) self.find_or_create_user() - process_instance = self.create_process_instance_from_process_model( - process_model - ) + process_instance = self.create_process_instance_from_process_model(process_model) processor = ProcessInstanceProcessor(process_instance) with pytest.raises(ApiError) as exception: diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_script_unit_test_runner.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_script_unit_test_runner.py index 9ece043a..0fc3ee66 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_script_unit_test_runner.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_script_unit_test_runner.py @@ -26,9 +26,7 @@ class TestScriptUnitTestRunner(BaseTest): app.config["THREAD_LOCAL_DATA"].process_instance_id = None process_group_id = "test_logging_spiff_logger" - self.create_process_group( - client, with_super_admin_user, process_group_id, process_group_id - ) + self.create_process_group(client, with_super_admin_user, process_group_id, process_group_id) process_model_id = "simple_script" process_model_identifier = f"{process_group_id}/{process_model_id}" load_test_spec( @@ -36,14 +34,10 @@ class TestScriptUnitTestRunner(BaseTest): bpmn_file_name=process_model_id, process_model_source_directory=process_model_id, ) - bpmn_process_instance = ( - ProcessInstanceProcessor.get_bpmn_process_instance_from_process_model( - process_model_identifier - ) - ) - task = ProcessInstanceProcessor.get_task_by_bpmn_identifier( - "Activity_CalculateNewData", bpmn_process_instance + bpmn_process_instance = 
ProcessInstanceProcessor.get_bpmn_process_instance_from_process_model( + process_model_identifier ) + task = ProcessInstanceProcessor.get_task_by_bpmn_identifier("Activity_CalculateNewData", bpmn_process_instance) assert task is not None input_context: PythonScriptContext = {"a": 1} @@ -68,9 +62,7 @@ class TestScriptUnitTestRunner(BaseTest): app.config["THREAD_LOCAL_DATA"].process_instance_id = None process_group_id = "test_logging_spiff_logger" - self.create_process_group( - client, with_super_admin_user, process_group_id, process_group_id - ) + self.create_process_group(client, with_super_admin_user, process_group_id, process_group_id) process_model_id = "simple_script" process_model_identifier = f"{process_group_id}/{process_model_id}" @@ -79,14 +71,10 @@ class TestScriptUnitTestRunner(BaseTest): bpmn_file_name=process_model_id, process_model_source_directory=process_model_id, ) - bpmn_process_instance = ( - ProcessInstanceProcessor.get_bpmn_process_instance_from_process_model( - process_model_identifier - ) - ) - task = ProcessInstanceProcessor.get_task_by_bpmn_identifier( - "Activity_CalculateNewData", bpmn_process_instance + bpmn_process_instance = ProcessInstanceProcessor.get_bpmn_process_instance_from_process_model( + process_model_identifier ) + task = ProcessInstanceProcessor.get_task_by_bpmn_identifier("Activity_CalculateNewData", bpmn_process_instance) assert task is not None input_context: PythonScriptContext = {"a": 1} @@ -111,9 +99,7 @@ class TestScriptUnitTestRunner(BaseTest): app.config["THREAD_LOCAL_DATA"].process_instance_id = None process_group_id = "script_with_unit_tests" - self.create_process_group( - client, with_super_admin_user, process_group_id, process_group_id - ) + self.create_process_group(client, with_super_admin_user, process_group_id, process_group_id) process_model_id = "script_with_unit_tests" process_model_identifier = f"{process_group_id}/{process_model_id}" @@ -122,21 +108,15 @@ class TestScriptUnitTestRunner(BaseTest): bpmn_file_name=process_model_id, process_model_source_directory=process_model_id, ) - bpmn_process_instance = ( - ProcessInstanceProcessor.get_bpmn_process_instance_from_process_model( - process_model_identifier - ) - ) - task = ProcessInstanceProcessor.get_task_by_bpmn_identifier( - "script_with_unit_test_id", bpmn_process_instance + bpmn_process_instance = ProcessInstanceProcessor.get_bpmn_process_instance_from_process_model( + process_model_identifier ) + task = ProcessInstanceProcessor.get_task_by_bpmn_identifier("script_with_unit_test_id", bpmn_process_instance) assert task is not None expected_output_context: PythonScriptContext = {"hey": True} - unit_test_result = ScriptUnitTestRunner.run_test( - task, "sets_hey_to_true_if_hey_is_false" - ) + unit_test_result = ScriptUnitTestRunner.run_test(task, "sets_hey_to_true_if_hey_is_false") assert unit_test_result.result assert unit_test_result.context == expected_output_context @@ -152,9 +132,7 @@ class TestScriptUnitTestRunner(BaseTest): app.config["THREAD_LOCAL_DATA"].process_instance_id = None process_group_id = "script_with_unit_tests" - self.create_process_group( - client, with_super_admin_user, process_group_id, process_group_id - ) + self.create_process_group(client, with_super_admin_user, process_group_id, process_group_id) process_model_id = "script_with_unit_tests" process_model_identifier = f"{process_group_id}/{process_model_id}" @@ -164,21 +142,15 @@ class TestScriptUnitTestRunner(BaseTest): bpmn_file_name=process_model_id, process_model_source_directory=process_model_id, ) 
- bpmn_process_instance = ( - ProcessInstanceProcessor.get_bpmn_process_instance_from_process_model( - process_model_identifier - ) - ) - task = ProcessInstanceProcessor.get_task_by_bpmn_identifier( - "script_with_unit_test_id", bpmn_process_instance + bpmn_process_instance = ProcessInstanceProcessor.get_bpmn_process_instance_from_process_model( + process_model_identifier ) + task = ProcessInstanceProcessor.get_task_by_bpmn_identifier("script_with_unit_test_id", bpmn_process_instance) assert task is not None expected_output_context: PythonScriptContext = {"something_else": True} - unit_test_result = ScriptUnitTestRunner.run_test( - task, "sets_something_else_if_no_hey" - ) + unit_test_result = ScriptUnitTestRunner.run_test(task, "sets_something_else_if_no_hey") assert unit_test_result.result assert unit_test_result.context == expected_output_context diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_service_task_delegate.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_service_task_delegate.py index 8348d65c..5a101878 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_service_task_delegate.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_service_task_delegate.py @@ -13,32 +13,24 @@ from spiffworkflow_backend.services.service_task_service import ServiceTaskDeleg class TestServiceTaskDelegate(BaseTest): """TestServiceTaskDelegate.""" - def test_check_prefixes_without_secret( - self, app: Flask, with_db_and_bpmn_file_cleanup: None - ) -> None: + def test_check_prefixes_without_secret(self, app: Flask, with_db_and_bpmn_file_cleanup: None) -> None: """Test_check_prefixes_without_secret.""" result = ServiceTaskDelegate.check_prefixes("hey") assert result == "hey" - def test_check_prefixes_with_int( - self, app: Flask, with_db_and_bpmn_file_cleanup: None - ) -> None: + def test_check_prefixes_with_int(self, app: Flask, with_db_and_bpmn_file_cleanup: None) -> None: """Test_check_prefixes_with_int.""" result = ServiceTaskDelegate.check_prefixes(1) assert result == 1 - def test_check_prefixes_with_secret( - self, app: Flask, with_db_and_bpmn_file_cleanup: None - ) -> None: + def test_check_prefixes_with_secret(self, app: Flask, with_db_and_bpmn_file_cleanup: None) -> None: """Test_check_prefixes_with_secret.""" user = self.find_or_create_user("test_user") SecretService().add_secret("hot_secret", "my_secret_value", user.id) result = ServiceTaskDelegate.check_prefixes("secret:hot_secret") assert result == "my_secret_value" - def test_invalid_call_returns_good_error_message( - self, app: Flask, with_db_and_bpmn_file_cleanup: None - ) -> None: + def test_invalid_call_returns_good_error_message(self, app: Flask, with_db_and_bpmn_file_cleanup: None) -> None: with patch("requests.post") as mock_post: mock_post.return_value.status_code = 404 mock_post.return_value.ok = True @@ -47,7 +39,4 @@ class TestServiceTaskDelegate(BaseTest): ServiceTaskDelegate.call_connector("my_invalid_operation", {}, {}) assert "404" in str(ae) assert "The service did not find the requested resource." in str(ae) - assert ( - "A critical component (The connector proxy) is not responding correctly." - in str(ae) - ) + assert "A critical component (The connector proxy) is not responding correctly." 
in str(ae) diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_spec_file_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_spec_file_service.py index 6ae49860..c2f8acdb 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_spec_file_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_spec_file_service.py @@ -26,9 +26,7 @@ class TestSpecFileService(BaseTest): process_model_id = "call_activity_nested" bpmn_file_name = "call_activity_nested.bpmn" - call_activity_nested_relative_file_path = os.path.join( - process_group_id, process_model_id, bpmn_file_name - ) + call_activity_nested_relative_file_path = os.path.join(process_group_id, process_model_id, bpmn_file_name) def test_can_store_process_ids_for_lookup( self, @@ -49,10 +47,7 @@ class TestSpecFileService(BaseTest): bpmn_process_id_lookups = SpecReferenceCache.query.all() assert len(bpmn_process_id_lookups) == 1 assert bpmn_process_id_lookups[0].identifier == "Level1" - assert ( - bpmn_process_id_lookups[0].relative_path - == self.call_activity_nested_relative_file_path - ) + assert bpmn_process_id_lookups[0].relative_path == self.call_activity_nested_relative_file_path def test_fails_to_save_duplicate_process_id( self, @@ -74,27 +69,17 @@ class TestSpecFileService(BaseTest): bpmn_process_id_lookups = SpecReferenceCache.query.all() assert len(bpmn_process_id_lookups) == 1 assert bpmn_process_id_lookups[0].identifier == bpmn_process_identifier - assert ( - bpmn_process_id_lookups[0].relative_path - == self.call_activity_nested_relative_file_path - ) + assert bpmn_process_id_lookups[0].relative_path == self.call_activity_nested_relative_file_path with pytest.raises(ProcessModelFileInvalidError) as exception: load_test_spec( "call_activity_nested_duplicate", process_model_source_directory="call_activity_duplicate", bpmn_file_name="call_activity_nested_duplicate", ) - assert ( - f"Process id ({bpmn_process_identifier}) has already been used" - in str(exception.value) - ) + assert f"Process id ({bpmn_process_identifier}) has already been used" in str(exception.value) - process_model = ProcessModelService.get_process_model( - "call_activity_nested_duplicate" - ) - full_file_path = SpecFileService.full_file_path( - process_model, "call_activity_nested_duplicate.bpmn" - ) + process_model = ProcessModelService.get_process_model("call_activity_nested_duplicate") + full_file_path = SpecFileService.full_file_path(process_model, "call_activity_nested_duplicate.bpmn") assert not os.path.isfile(full_file_path) def test_updates_relative_file_path_when_appropriate( @@ -126,10 +111,7 @@ class TestSpecFileService(BaseTest): bpmn_process_id_lookups = SpecReferenceCache.query.all() assert len(bpmn_process_id_lookups) == 1 assert bpmn_process_id_lookups[0].identifier == bpmn_process_identifier - assert ( - bpmn_process_id_lookups[0].relative_path - == self.call_activity_nested_relative_file_path - ) + assert bpmn_process_id_lookups[0].relative_path == self.call_activity_nested_relative_file_path def test_change_the_identifier_cleans_up_cache( self, @@ -163,10 +145,7 @@ class TestSpecFileService(BaseTest): assert len(bpmn_process_id_lookups) == 1 assert bpmn_process_id_lookups[0].identifier != old_identifier assert bpmn_process_id_lookups[0].identifier == "Level1" - assert ( - bpmn_process_id_lookups[0].relative_path - == self.call_activity_nested_relative_file_path - ) + assert bpmn_process_id_lookups[0].relative_path == self.call_activity_nested_relative_file_path def 
test_load_reference_information( self, @@ -200,9 +179,7 @@ class TestSpecFileService(BaseTest): # , # process_model_source_directory="call_activity_nested", # ) - process_model_info = ProcessModelService.get_process_model( - process_model_identifier - ) + process_model_info = ProcessModelService.get_process_model(process_model_identifier) files = SpecFileService.get_files(process_model_info) file = next(filter(lambda f: f.name == "call_activity_level_3.bpmn", files)) @@ -232,9 +209,7 @@ class TestSpecFileService(BaseTest): process_model_source_directory="error", ) with pytest.raises(ProcessModelFileInvalidError): - SpecFileService.update_file( - process_model, "bad_xml.bpmn", b"THIS_IS_NOT_VALID_XML" - ) + SpecFileService.update_file(process_model, "bad_xml.bpmn", b"THIS_IS_NOT_VALID_XML") full_file_path = SpecFileService.full_file_path(process_model, "bad_xml.bpmn") assert not os.path.isfile(full_file_path) @@ -251,16 +226,8 @@ class TestSpecFileService(BaseTest): ) -> None: """Test_does_not_evaluate_entities.""" string_replacement = b"THIS_STRING_SHOULD_NOT_EXIST_ITS_SECRET" - tmp_file = os.path.normpath( - self.get_test_data_file_full_path("file_to_inject", "xml_with_entity") - ) - file_contents = self.get_test_data_file_contents( - "invoice.bpmn", "xml_with_entity" - ) - file_contents = ( - file_contents.decode("utf-8") - .replace("{{FULL_PATH_TO_FILE}}", tmp_file) - .encode() - ) + tmp_file = os.path.normpath(self.get_test_data_file_full_path("file_to_inject", "xml_with_entity")) + file_contents = self.get_test_data_file_contents("invoice.bpmn", "xml_with_entity") + file_contents = file_contents.decode("utf-8").replace("{{FULL_PATH_TO_FILE}}", tmp_file).encode() etree_element = SpecFileService.get_etree_from_xml_bytes(file_contents) assert string_replacement not in etree.tostring(etree_element) diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_spiff_logging.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_spiff_logging.py index a47983fe..9c8b4841 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_spiff_logging.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_spiff_logging.py @@ -12,18 +12,14 @@ from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel class TestSpiffLogging(BaseTest): """TestSpiffLogging.""" - def test_timestamps_are_stored_correctly( - self, app: Flask, with_db_and_bpmn_file_cleanup: None - ) -> None: + def test_timestamps_are_stored_correctly(self, app: Flask, with_db_and_bpmn_file_cleanup: None) -> None: """Test_timestamps_are_stored_correctly.""" process_model = load_test_spec( "call_activity_test", process_model_source_directory="call_activity_same_directory", ) - process_instance = self.create_process_instance_from_process_model( - process_model - ) + process_instance = self.create_process_instance_from_process_model(process_model) bpmn_process_identifier = "test_process_identifier" spiff_task_guid = "test_spiff_task_guid" bpmn_task_identifier = "test_bpmn_task_identifier" diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_user_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_user_service.py index 959975d5..b7960a4f 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_user_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_user_service.py @@ -33,9 +33,7 @@ class TestUserService(BaseTest): ) -> None: """Test_waiting_group_assignments.""" everybody_group = 
GroupService.find_or_create_group("everybodyGroup") - UserService.add_waiting_group_assignment( - UserGroupAssignmentWaitingModel.MATCH_ALL_USERS, everybody_group - ) + UserService.add_waiting_group_assignment(UserGroupAssignmentWaitingModel.MATCH_ALL_USERS, everybody_group) initiator_user = self.find_or_create_user("initiator_user") assert initiator_user.groups[0] == everybody_group @@ -48,7 +46,5 @@ class TestUserService(BaseTest): """Test_waiting_group_assignments.""" initiator_user = self.find_or_create_user("initiator_user") everybody_group = GroupService.find_or_create_group("everybodyGroup") - UserService.add_waiting_group_assignment( - UserGroupAssignmentWaitingModel.MATCH_ALL_USERS, everybody_group - ) + UserService.add_waiting_group_assignment(UserGroupAssignmentWaitingModel.MATCH_ALL_USERS, everybody_group) assert initiator_user.groups[0] == everybody_group diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_various_bpmn_constructs.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_various_bpmn_constructs.py index 26656143..fa25917e 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_various_bpmn_constructs.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_various_bpmn_constructs.py @@ -28,12 +28,8 @@ class TestVariousBpmnConstructs(BaseTest): "timer_intermediate_catch_event", ) - process_model = ProcessModelService.get_process_model( - process_model_id=process_model_identifier - ) + process_model = ProcessModelService.get_process_model(process_model_id=process_model_identifier) - process_instance = self.create_process_instance_from_process_model( - process_model - ) + process_instance = self.create_process_instance_from_process_model(process_model) processor = ProcessInstanceProcessor(process_instance) processor.do_engine_steps(save=True) From 0f7f9220ecd238a82a80f318f80319e053347c0b Mon Sep 17 00:00:00 2001 From: burnettk Date: Thu, 16 Mar 2023 00:02:02 -0400 Subject: [PATCH 024/162] try to fix postgres and mysql group by --- .../routes/tasks_controller.py | 38 ++++++++++--------- .../integration/test_process_api.py | 8 ++-- 2 files changed, 25 insertions(+), 21 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py index db13c407..bc683595 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py @@ -107,6 +107,7 @@ def task_list_my_tasks( potential_owner_usernames_from_group_concat_or_similar = _get_potential_owner_usernames(assigned_user) # FIXME: this breaks postgres. Look at commit c147cdb47b1481f094b8c3d82dc502fe961f4977 for + # UPDATE: maybe fixed in postgres and mysql. remove comment if so. # the postgres fix but it breaks the method for mysql. 
# error in postgres: # psycopg2.errors.GroupingError) column \"process_instance.process_model_identifier\" must @@ -117,19 +118,12 @@ def task_list_my_tasks( HumanTaskModel.task_title, HumanTaskModel.process_model_display_name, HumanTaskModel.process_instance_id, - ProcessInstanceModel.process_model_identifier, - ProcessInstanceModel.status.label("process_instance_status"), # type: ignore - ProcessInstanceModel.updated_at_in_seconds, - ProcessInstanceModel.created_at_in_seconds, - process_initiator_user.username.label("process_initiator_username"), - GroupModel.identifier.label("assigned_user_group_identifier"), - # func.max does not seem to return columns so we need to call both - func.max(ProcessInstanceModel.process_model_identifier), - func.max(ProcessInstanceModel.status.label("process_instance_status")), # type: ignore - func.max(ProcessInstanceModel.updated_at_in_seconds), - func.max(ProcessInstanceModel.created_at_in_seconds), - func.max(process_initiator_user.username.label("process_initiator_username")), - func.max(GroupModel.identifier.label("assigned_user_group_identifier")), + func.max(ProcessInstanceModel.process_model_identifier).label("process_model_identifier"), + func.max(ProcessInstanceModel.status).label("process_instance_status"), # type: ignore + func.max(ProcessInstanceModel.updated_at_in_seconds).label("updated_at_in_seconds"), + func.max(ProcessInstanceModel.created_at_in_seconds).label("created_at_in_seconds"), + func.max(process_initiator_user.username).label("process_initiator_username"), + func.max(GroupModel.identifier).label("assigned_user_group_identifier"), potential_owner_usernames_from_group_concat_or_similar, ).paginate(page=page, per_page=per_page, error_out=False) @@ -486,12 +480,22 @@ def _get_tasks( potential_owner_usernames_from_group_concat_or_similar = _get_potential_owner_usernames(assigned_user) + process_model_identifier_column = ProcessInstanceModel.process_model_identifier + process_instance_status_column = ProcessInstanceModel.status.label("process_instance_status") # type: ignore + user_username_column = UserModel.username.label("process_initiator_username") # type: ignore + group_identifier_column = GroupModel.identifier.label("assigned_user_group_identifier") + if current_app.config['SPIFFWORKFLOW_BACKEND_DATABASE_TYPE'] == 'postgres': + process_model_identifier_column = func.max(ProcessInstanceModel.process_model_identifier).label("process_model_identifier") + process_instance_status_column = func.max(ProcessInstanceModel.status).label("process_instance_status") # type: ignore + user_username_column = func.max(UserModel.username).label("process_initiator_username") # type: ignore + group_identifier_column = func.max(GroupModel.identifier).label("assigned_user_group_identifier") + human_tasks = ( human_tasks_query.add_columns( - ProcessInstanceModel.process_model_identifier, - ProcessInstanceModel.status.label("process_instance_status"), # type: ignore - UserModel.username.label("process_initiator_username"), # type: ignore - GroupModel.identifier.label("assigned_user_group_identifier"), + process_model_identifier_column, + process_instance_status_column, + user_username_column, + group_identifier_column, HumanTaskModel.task_name, HumanTaskModel.task_title, HumanTaskModel.process_model_display_name, diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index a88570ee..086841c0 100644 --- 
a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -2211,10 +2211,10 @@ class TestProcessApi(BaseTest): # + 2 -Two messages logged for the API Calls used to create the processes. assert len(response.json["results"]) == 6 - @pytest.mark.skipif( - os.environ.get("SPIFFWORKFLOW_BACKEND_DATABASE_TYPE") == "postgres", - reason="look at comment in tasks_controller method task_list_my_tasks", - ) + # @pytest.mark.skipif( + # os.environ.get("SPIFFWORKFLOW_BACKEND_DATABASE_TYPE") == "postgres", + # reason="look at comment in tasks_controller method task_list_my_tasks", + # ) def test_correct_user_can_get_and_update_a_task( self, app: Flask, From 45550482c5211083161c87b09fbe2b6541b3ce4e Mon Sep 17 00:00:00 2001 From: burnettk Date: Thu, 16 Mar 2023 00:07:39 -0400 Subject: [PATCH 025/162] lint --- .../spiffworkflow_backend/routes/tasks_controller.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py index bc683595..7ca7c6eb 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py @@ -119,7 +119,7 @@ def task_list_my_tasks( HumanTaskModel.process_model_display_name, HumanTaskModel.process_instance_id, func.max(ProcessInstanceModel.process_model_identifier).label("process_model_identifier"), - func.max(ProcessInstanceModel.status).label("process_instance_status"), # type: ignore + func.max(ProcessInstanceModel.status).label("process_instance_status"), func.max(ProcessInstanceModel.updated_at_in_seconds).label("updated_at_in_seconds"), func.max(ProcessInstanceModel.created_at_in_seconds).label("created_at_in_seconds"), func.max(process_initiator_user.username).label("process_initiator_username"), @@ -484,10 +484,12 @@ def _get_tasks( process_instance_status_column = ProcessInstanceModel.status.label("process_instance_status") # type: ignore user_username_column = UserModel.username.label("process_initiator_username") # type: ignore group_identifier_column = GroupModel.identifier.label("assigned_user_group_identifier") - if current_app.config['SPIFFWORKFLOW_BACKEND_DATABASE_TYPE'] == 'postgres': - process_model_identifier_column = func.max(ProcessInstanceModel.process_model_identifier).label("process_model_identifier") - process_instance_status_column = func.max(ProcessInstanceModel.status).label("process_instance_status") # type: ignore - user_username_column = func.max(UserModel.username).label("process_initiator_username") # type: ignore + if current_app.config["SPIFFWORKFLOW_BACKEND_DATABASE_TYPE"] == "postgres": + process_model_identifier_column = func.max(ProcessInstanceModel.process_model_identifier).label( + "process_model_identifier" + ) + process_instance_status_column = func.max(ProcessInstanceModel.status).label("process_instance_status") + user_username_column = func.max(UserModel.username).label("process_initiator_username") group_identifier_column = func.max(GroupModel.identifier).label("assigned_user_group_identifier") human_tasks = ( From a22160e81cae47f671bc9aa48b85e1f345828735 Mon Sep 17 00:00:00 2001 From: jasquat Date: Thu, 16 Mar 2023 09:30:25 -0400 Subject: [PATCH 026/162] some initial code to use tasks for logs --- spiffworkflow-backend/migrations/env.py | 2 + 
.../{434e6494e8ff_.py => 077a27ef1246_.py} | 84 ++++++++++--------- .../models/human_task.py | 4 +- .../src/spiffworkflow_backend/models/task.py | 4 + .../routes/process_instances_controller.py | 56 ++++++++----- .../services/process_instance_processor.py | 21 ++++- .../services/task_service.py | 7 ++ .../services/workflow_execution_service.py | 44 +++++++--- .../integration/test_logging_service.py | 23 ++--- .../unit/test_process_instance_processor.py | 3 +- 10 files changed, 156 insertions(+), 92 deletions(-) rename spiffworkflow-backend/migrations/versions/{434e6494e8ff_.py => 077a27ef1246_.py} (99%) diff --git a/spiffworkflow-backend/migrations/env.py b/spiffworkflow-backend/migrations/env.py index 630e381a..68feded2 100644 --- a/spiffworkflow-backend/migrations/env.py +++ b/spiffworkflow-backend/migrations/env.py @@ -1,3 +1,5 @@ +from __future__ import with_statement + import logging from logging.config import fileConfig diff --git a/spiffworkflow-backend/migrations/versions/434e6494e8ff_.py b/spiffworkflow-backend/migrations/versions/077a27ef1246_.py similarity index 99% rename from spiffworkflow-backend/migrations/versions/434e6494e8ff_.py rename to spiffworkflow-backend/migrations/versions/077a27ef1246_.py index 3663be8a..b3f07625 100644 --- a/spiffworkflow-backend/migrations/versions/434e6494e8ff_.py +++ b/spiffworkflow-backend/migrations/versions/077a27ef1246_.py @@ -1,8 +1,8 @@ """empty message -Revision ID: 434e6494e8ff +Revision ID: 077a27ef1246 Revises: -Create Date: 2023-03-15 12:25:48.665481 +Create Date: 2023-03-15 16:36:23.278887 """ from alembic import op @@ -10,7 +10,7 @@ import sqlalchemy as sa from sqlalchemy.dialects import mysql # revision identifiers, used by Alembic. -revision = '434e6494e8ff' +revision = '077a27ef1246' down_revision = None branch_labels = None depends_on = None @@ -235,31 +235,6 @@ def upgrade(): sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('username', 'group_id', name='user_group_assignment_staged_unique') ) - op.create_table('human_task', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('process_instance_id', sa.Integer(), nullable=False), - sa.Column('lane_assignment_id', sa.Integer(), nullable=True), - sa.Column('completed_by_user_id', sa.Integer(), nullable=True), - sa.Column('actual_owner_id', sa.Integer(), nullable=True), - sa.Column('form_file_name', sa.String(length=50), nullable=True), - sa.Column('ui_form_file_name', sa.String(length=50), nullable=True), - sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True), - sa.Column('created_at_in_seconds', sa.Integer(), nullable=True), - sa.Column('task_id', sa.String(length=50), nullable=True), - sa.Column('task_name', sa.String(length=255), nullable=True), - sa.Column('task_title', sa.String(length=50), nullable=True), - sa.Column('task_type', sa.String(length=50), nullable=True), - sa.Column('task_status', sa.String(length=50), nullable=True), - sa.Column('process_model_display_name', sa.String(length=255), nullable=True), - sa.Column('bpmn_process_identifier', sa.String(length=255), nullable=True), - sa.Column('completed', sa.Boolean(), nullable=False), - sa.ForeignKeyConstraint(['actual_owner_id'], ['user.id'], ), - sa.ForeignKeyConstraint(['completed_by_user_id'], ['user.id'], ), - sa.ForeignKeyConstraint(['lane_assignment_id'], ['group.id'], ), - sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index(op.f('ix_human_task_completed'), 'human_task', ['completed'], unique=False) 
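# Note (illustrative annotation, not Alembic output): the human_task block
# removed above is re-created later in this migration, after the new task
# table, because human_task now carries a foreign key into task and the
# referenced table must already exist when the constraint is created:
#
#   sa.Column('task_model_id', sa.Integer(), nullable=True),
#   sa.ForeignKeyConstraint(['task_model_id'], ['task.id'], ),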
op.create_table('message_instance', sa.Column('id', sa.Integer(), nullable=False), sa.Column('process_instance_id', sa.Integer(), nullable=True), @@ -367,17 +342,33 @@ def upgrade(): op.create_index(op.f('ix_task_guid'), 'task', ['guid'], unique=True) op.create_index(op.f('ix_task_json_data_hash'), 'task', ['json_data_hash'], unique=False) op.create_index(op.f('ix_task_python_env_data_hash'), 'task', ['python_env_data_hash'], unique=False) - op.create_table('human_task_user', + op.create_table('human_task', sa.Column('id', sa.Integer(), nullable=False), - sa.Column('human_task_id', sa.Integer(), nullable=False), - sa.Column('user_id', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['human_task_id'], ['human_task.id'], ), - sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('human_task_id', 'user_id', name='human_task_user_unique') + sa.Column('process_instance_id', sa.Integer(), nullable=False), + sa.Column('lane_assignment_id', sa.Integer(), nullable=True), + sa.Column('completed_by_user_id', sa.Integer(), nullable=True), + sa.Column('actual_owner_id', sa.Integer(), nullable=True), + sa.Column('form_file_name', sa.String(length=50), nullable=True), + sa.Column('ui_form_file_name', sa.String(length=50), nullable=True), + sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True), + sa.Column('created_at_in_seconds', sa.Integer(), nullable=True), + sa.Column('task_model_id', sa.Integer(), nullable=True), + sa.Column('task_id', sa.String(length=50), nullable=True), + sa.Column('task_name', sa.String(length=255), nullable=True), + sa.Column('task_title', sa.String(length=50), nullable=True), + sa.Column('task_type', sa.String(length=50), nullable=True), + sa.Column('task_status', sa.String(length=50), nullable=True), + sa.Column('process_model_display_name', sa.String(length=255), nullable=True), + sa.Column('bpmn_process_identifier', sa.String(length=255), nullable=True), + sa.Column('completed', sa.Boolean(), nullable=False), + sa.ForeignKeyConstraint(['actual_owner_id'], ['user.id'], ), + sa.ForeignKeyConstraint(['completed_by_user_id'], ['user.id'], ), + sa.ForeignKeyConstraint(['lane_assignment_id'], ['group.id'], ), + sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ), + sa.ForeignKeyConstraint(['task_model_id'], ['task.id'], ), + sa.PrimaryKeyConstraint('id') ) - op.create_index(op.f('ix_human_task_user_human_task_id'), 'human_task_user', ['human_task_id'], unique=False) - op.create_index(op.f('ix_human_task_user_user_id'), 'human_task_user', ['user_id'], unique=False) + op.create_index(op.f('ix_human_task_completed'), 'human_task', ['completed'], unique=False) op.create_table('message_instance_correlation_rule', sa.Column('id', sa.Integer(), nullable=False), sa.Column('message_instance_id', sa.Integer(), nullable=False), @@ -390,16 +381,29 @@ def upgrade(): sa.UniqueConstraint('message_instance_id', 'name', name='message_instance_id_name_unique') ) op.create_index(op.f('ix_message_instance_correlation_rule_message_instance_id'), 'message_instance_correlation_rule', ['message_instance_id'], unique=False) + op.create_table('human_task_user', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('human_task_id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['human_task_id'], ['human_task.id'], ), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('human_task_id', 
'user_id', name='human_task_user_unique') + ) + op.create_index(op.f('ix_human_task_user_human_task_id'), 'human_task_user', ['human_task_id'], unique=False) + op.create_index(op.f('ix_human_task_user_user_id'), 'human_task_user', ['user_id'], unique=False) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.drop_index(op.f('ix_message_instance_correlation_rule_message_instance_id'), table_name='message_instance_correlation_rule') - op.drop_table('message_instance_correlation_rule') op.drop_index(op.f('ix_human_task_user_user_id'), table_name='human_task_user') op.drop_index(op.f('ix_human_task_user_human_task_id'), table_name='human_task_user') op.drop_table('human_task_user') + op.drop_index(op.f('ix_message_instance_correlation_rule_message_instance_id'), table_name='message_instance_correlation_rule') + op.drop_table('message_instance_correlation_rule') + op.drop_index(op.f('ix_human_task_completed'), table_name='human_task') + op.drop_table('human_task') op.drop_index(op.f('ix_task_python_env_data_hash'), table_name='task') op.drop_index(op.f('ix_task_json_data_hash'), table_name='task') op.drop_index(op.f('ix_task_guid'), table_name='task') @@ -416,8 +420,6 @@ def downgrade(): op.drop_table('process_instance_file_data') op.drop_table('permission_assignment') op.drop_table('message_instance') - op.drop_index(op.f('ix_human_task_completed'), table_name='human_task') - op.drop_table('human_task') op.drop_table('user_group_assignment_waiting') op.drop_table('user_group_assignment') op.drop_index(op.f('ix_task_definition_bpmn_identifier'), table_name='task_definition') diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task.py index e1ecd1d1..48e30a57 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task.py @@ -11,7 +11,7 @@ from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel from spiffworkflow_backend.models.group import GroupModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModel -from spiffworkflow_backend.models.task import Task +from spiffworkflow_backend.models.task import Task, TaskModel from spiffworkflow_backend.models.user import UserModel @@ -43,6 +43,8 @@ class HumanTaskModel(SpiffworkflowBaseDBModel): updated_at_in_seconds: int = db.Column(db.Integer) created_at_in_seconds: int = db.Column(db.Integer) + # task_id came first which is why it's a string and task_model_id is the int and foreignkey + task_model_id: int = db.Column(ForeignKey(TaskModel.id), nullable=True) # type: ignore task_id: str = db.Column(db.String(50)) task_name: str = db.Column(db.String(255)) task_title: str = db.Column(db.String(50)) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py index 75320e4d..5bee3b39 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py @@ -19,6 +19,10 @@ from spiffworkflow_backend.models.json_data import JsonDataModel from spiffworkflow_backend.models.task_definition import TaskDefinitionModel +class TaskNotFoundError(Exception): + pass + + class MultiInstanceType(enum.Enum): """MultiInstanceType.""" diff --git 
a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index 489b710c..1c5aff51 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -42,7 +42,7 @@ from spiffworkflow_backend.models.spec_reference import SpecReferenceCache from spiffworkflow_backend.models.spec_reference import SpecReferenceNotFoundError from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel -from spiffworkflow_backend.models.task import Task +from spiffworkflow_backend.models.task import Task, TaskModel from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.routes.process_api_blueprint import ( _find_process_instance_by_id_or_raise, @@ -205,28 +205,40 @@ def process_instance_log_list( # to make sure the process instance exists process_instance = _find_process_instance_by_id_or_raise(process_instance_id) - log_query = SpiffLoggingModel.query.filter(SpiffLoggingModel.process_instance_id == process_instance.id) - if not detailed: - log_query = log_query.filter( - # 1. this was the previous implementation, where we only show completed tasks and skipped tasks. - # maybe we want to iterate on this in the future (in a third tab under process instance logs?) - # or_( - # SpiffLoggingModel.message.in_(["State change to COMPLETED"]), # type: ignore - # SpiffLoggingModel.message.like("Skipped task %"), # type: ignore - # ) - # 2. We included ["End Event", "Default Start Event"] along with Default Throwing Event, but feb 2023 - # we decided to remove them, since they get really chatty when there are lots of subprocesses and call activities. - and_( - SpiffLoggingModel.message.in_(["State change to COMPLETED"]), # type: ignore - SpiffLoggingModel.bpmn_task_type.in_(["Default Throwing Event"]), # type: ignore - ) - ) - + # log_query = SpiffLoggingModel.query.filter(SpiffLoggingModel.process_instance_id == process_instance.id) + # if not detailed: + # log_query = log_query.filter( + # # 1. this was the previous implementation, where we only show completed tasks and skipped tasks. + # # maybe we want to iterate on this in the future (in a third tab under process instance logs?) + # # or_( + # # SpiffLoggingModel.message.in_(["State change to COMPLETED"]), # type: ignore + # # SpiffLoggingModel.message.like("Skipped task %"), # type: ignore + # # ) + # # 2. We included ["End Event", "Default Start Event"] along with Default Throwing Event, but feb 2023 + # # we decided to remove them, since they get really chatty when there are lots of subprocesses and call activities. 
+ # and_( + # SpiffLoggingModel.message.in_(["State change to COMPLETED"]), # type: ignore + # SpiffLoggingModel.bpmn_task_type.in_(["Default Throwing Event"]), # type: ignore + # ) + # ) + # + # logs = ( + # log_query.order_by(SpiffLoggingModel.timestamp.desc()) # type: ignore + # .join( + # UserModel, UserModel.id == SpiffLoggingModel.current_user_id, isouter=True + # ) # isouter since if we don't have a user, we still want the log + # .add_columns( + # UserModel.username, + # ) + # .paginate(page=page, per_page=per_page, error_out=False) + # ) + log_query = TaskModel.query.filter_by(process_instance_id=process_instance.id) logs = ( - log_query.order_by(SpiffLoggingModel.timestamp.desc()) # type: ignore - .join( - UserModel, UserModel.id == SpiffLoggingModel.current_user_id, isouter=True - ) # isouter since if we don't have a user, we still want the log + log_query.order_by(TaskModel.end_in_seconds.desc()) # type: ignore + .outerjoin(HumanTaskModel, HumanTaskModel.task_model_id == TaskModel.id) + .outerjoin( + UserModel, UserModel.id == HumanTaskModel.completed_by_user_id + ) .add_columns( UserModel.username, ) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 6c8b64fc..b89e0e2d 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -235,6 +235,7 @@ class NonTaskDataBasedScriptEngineEnvironment(BasePythonScriptEngineEnvironment) # since the task data is not directly mutated when the script executes, need to determine which keys # have been deleted from the environment and remove them from task data if present. context_keys_to_drop = context.keys() - self.state.keys() + # import pdb; pdb.set_trace() for key_to_drop in context_keys_to_drop: context.pop(key_to_drop) @@ -1037,6 +1038,10 @@ class ProcessInstanceProcessor: Expects the save method to commit it. 
""" + # if self.process_instance_model.bpmn_process_definition_id is not None: + # return None + + # we may have to already process bpmn_defintions if we ever care about the Root task again bpmn_dict = self.serialize() bpmn_dict_keys = ("spec", "subprocess_specs", "serializer_version") process_instance_data_dict = {} @@ -1047,9 +1052,8 @@ class ProcessInstanceProcessor: else: process_instance_data_dict[bpmn_key] = bpmn_dict[bpmn_key] - # we may have to already process bpmn_defintions if we ever care about the Root task again - if self.process_instance_model.bpmn_process_definition_id is None: - self._add_bpmn_process_definitions(bpmn_spec_dict) + # if self.process_instance_model.bpmn_process_definition_id is not None: + self._add_bpmn_process_definitions(bpmn_spec_dict) subprocesses = process_instance_data_dict.pop("subprocesses") bpmn_process_parent, new_task_models, new_json_data_dicts = TaskService.add_bpmn_process( @@ -1144,13 +1148,19 @@ class ProcessInstanceProcessor: human_tasks.remove(at) if human_task is None: + task_guid = str(ready_or_waiting_task.id) + task_model = TaskModel.query.filter_by(guid=task_guid).first() + if task_model is None: + raise TaskNotFoundError(f"Could not find task for human task with guid: {task_guid}") + human_task = HumanTaskModel( process_instance_id=self.process_instance_model.id, process_model_display_name=process_model_display_name, bpmn_process_identifier=bpmn_process_identifier, form_file_name=form_file_name, ui_form_file_name=ui_form_file_name, - task_id=str(ready_or_waiting_task.id), + task_model_id=task_model.id, + task_id=task_guid, task_name=ready_or_waiting_task.task_spec.name, task_title=ready_or_waiting_task.task_spec.description, task_type=ready_or_waiting_task.task_spec.__class__.__name__, @@ -1536,12 +1546,15 @@ class ProcessInstanceProcessor: self._script_engine.environment.revise_state_with_task_data(task) return self.spiff_step_details_mapping(task, start, end) + # self._add_bpmn_json_records() + step_delegate = StepDetailLoggingDelegate(self.increment_spiff_step, spiff_step_details_mapping_builder) task_model_delegate = TaskModelSavingDelegate( secondary_engine_step_delegate=step_delegate, serializer=self._serializer, process_instance=self.process_instance_model, bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, + script_engine=self._script_engine, ) if execution_strategy_name is None: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index ad70175f..af346560 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -171,6 +171,13 @@ class TaskService: tasks = bpmn_process_dict.pop("tasks") bpmn_process_data_dict = bpmn_process_dict.pop("data") + if 'subprocesses' in bpmn_process_dict: + bpmn_process_dict.pop('subprocesses') + if 'spec' in bpmn_process_dict: + bpmn_process_dict.pop('spec') + if 'subprocess_specs' in bpmn_process_dict: + bpmn_process_dict.pop('subprocess_specs') + new_task_models = {} new_json_data_dicts: dict[str, JsonDataDict] = {} diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py index cbcd60da..82dd1dc7 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py +++ 
b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py @@ -58,13 +58,16 @@ class TaskModelSavingDelegate(EngineStepDelegate): serializer: BpmnWorkflowSerializer, process_instance: ProcessInstanceModel, bpmn_definition_to_task_definitions_mappings: dict, + script_engine, secondary_engine_step_delegate: Optional[EngineStepDelegate] = None, ) -> None: self.secondary_engine_step_delegate = secondary_engine_step_delegate self.process_instance = process_instance self.bpmn_definition_to_task_definitions_mappings = bpmn_definition_to_task_definitions_mappings + self.script_engine = script_engine self.current_task_model: Optional[TaskModel] = None + self.current_task_start_in_seconds: Optional[float] = None self.task_models: dict[str, TaskModel] = {} self.json_data_dicts: dict[str, JsonDataDict] = {} self.serializer = serializer @@ -75,6 +78,7 @@ class TaskModelSavingDelegate(EngineStepDelegate): Use the bpmn_process_id to do this. """ return self.process_instance.bpmn_process_id is not None + # return True def _update_json_data_dicts_using_list(self, json_data_dict_list: list[Optional[JsonDataDict]]) -> None: for json_data_dict in json_data_dict_list: @@ -83,6 +87,28 @@ class TaskModelSavingDelegate(EngineStepDelegate): def will_complete_task(self, spiff_task: SpiffTask) -> None: if self.should_update_task_model(): + # _bpmn_process, task_model, new_task_models, new_json_data_dicts = ( + # TaskService.find_or_create_task_model_from_spiff_task( + # spiff_task, + # self.process_instance, + # self.serializer, + # bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, + # ) + # ) + # self.current_task_model = task_model + # self.task_models.update(new_task_models) + # self.json_data_dicts.update(new_json_data_dicts) + # self.current_task_model.start_in_seconds = time.time() + self.current_task_start_in_seconds = time.time() + if self.secondary_engine_step_delegate: + self.secondary_engine_step_delegate.will_complete_task(spiff_task) + + def did_complete_task(self, spiff_task: SpiffTask) -> None: + # if self.current_task_model and self.should_update_task_model(): + if self.should_update_task_model(): + # if spiff_task.task_spec.name == 'top_level_script': + # import pdb; pdb.set_trace() + spiff_task.workflow.script_engine.environment.revise_state_with_task_data(spiff_task) _bpmn_process, task_model, new_task_models, new_json_data_dicts = ( TaskService.find_or_create_task_model_from_spiff_task( spiff_task, @@ -91,19 +117,13 @@ class TaskModelSavingDelegate(EngineStepDelegate): bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, ) ) - self.current_task_model = task_model + task_model.start_in_seconds = self.current_task_start_in_seconds or time.time() + task_model.end_in_seconds = time.time() + json_data_dict_list = TaskService.update_task_model(task_model, spiff_task, self.serializer) + self._update_json_data_dicts_using_list(json_data_dict_list) self.task_models.update(new_task_models) self.json_data_dicts.update(new_json_data_dicts) - self.current_task_model.start_in_seconds = time.time() - if self.secondary_engine_step_delegate: - self.secondary_engine_step_delegate.will_complete_task(spiff_task) - - def did_complete_task(self, spiff_task: SpiffTask) -> None: - if self.current_task_model and self.should_update_task_model(): - self.current_task_model.end_in_seconds = time.time() - json_data_dict_list = TaskService.update_task_model(self.current_task_model, spiff_task, self.serializer) 
- self._update_json_data_dicts_using_list(json_data_dict_list) - self.task_models[self.current_task_model.guid] = self.current_task_model + self.task_models[task_model.guid] = task_model if self.secondary_engine_step_delegate: self.secondary_engine_step_delegate.did_complete_task(spiff_task) @@ -122,7 +142,7 @@ class TaskModelSavingDelegate(EngineStepDelegate): for waiting_spiff_task in bpmn_process_instance.get_tasks( TaskState.WAITING | TaskState.CANCELLED | TaskState.READY | TaskState.MAYBE | TaskState.LIKELY ): - bpmn_process, task_model, new_task_models, new_json_data_dicts = ( + _bpmn_process, task_model, new_task_models, new_json_data_dicts = ( TaskService.find_or_create_task_model_from_spiff_task( waiting_spiff_task, self.process_instance, diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py index 17cf79cf..626aac55 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py @@ -59,14 +59,15 @@ class TestLoggingService(BaseTest): assert log_response.status_code == 200 assert log_response.json logs: list = log_response.json["results"] - assert len(logs) > 0 - for log in logs: - assert log["process_instance_id"] == process_instance_id - for key in [ - "timestamp", - "spiff_task_guid", - "bpmn_task_identifier", - "bpmn_process_identifier", - "message", - ]: - assert key in log.keys() + assert len(logs) == 8 + print(f"logs[0]: {logs[0]}") + # for log in logs: + # assert log["process_instance_id"] == process_instance_id + # for key in [ + # "timestamp", + # "spiff_task_guid", + # "bpmn_task_identifier", + # "bpmn_process_identifier", + # "message", + # ]: + # assert key in log.keys() diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index a5faf545..bdbad3b0 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -335,6 +335,7 @@ class TestProcessInstanceProcessor(BaseTest): # TODO: also check task data here from the spiff_task directly to ensure we hydrated spiff correctly def assert_spiff_task_is_in_process(spiff_task_name: str, bpmn_process_identifier: str) -> None: if spiff_task.task_spec.name == spiff_task_name: + base_failure_message = f"Failed on {bpmn_process_identifier} - {spiff_task_name}." expected_python_env_data = expected_task_data[spiff_task.task_spec.name] if spiff_task.task_spec.name in spiff_tasks_checked_once: expected_python_env_data = expected_task_data[f"{spiff_task.task_spec.name}_second"] @@ -343,7 +344,7 @@ class TestProcessInstanceProcessor(BaseTest): task_definition = task.task_definition assert task_definition.bpmn_identifier == spiff_task_name assert task_definition.bpmn_process_definition.bpmn_identifier == bpmn_process_identifier - assert task.python_env_data() == expected_python_env_data + assert task.python_env_data() == expected_python_env_data, f"{base_failure_message} Expected: {expected_python_env_data}. 
Received: {task.python_env_data()}" spiff_tasks_checked_once.append(spiff_task.task_spec.name) all_spiff_tasks = processor_final.bpmn_process_instance.get_tasks() From 24c199d1e52f4b4af02086e0a1c3afa6f8fe17d8 Mon Sep 17 00:00:00 2001 From: jasquat Date: Thu, 16 Mar 2023 10:29:15 -0400 Subject: [PATCH 027/162] some more test stuff w/ burnettk --- spiffworkflow-backend/poetry.lock | 6 +- spiffworkflow-backend/pyproject.toml | 3 +- .../services/process_instance_processor.py | 71 ++++++++++--------- .../services/workflow_execution_service.py | 10 +-- 4 files changed, 48 insertions(+), 42 deletions(-) diff --git a/spiffworkflow-backend/poetry.lock b/spiffworkflow-backend/poetry.lock index a8d70db3..5c1f42e7 100644 --- a/spiffworkflow-backend/poetry.lock +++ b/spiffworkflow-backend/poetry.lock @@ -1894,8 +1894,8 @@ lxml = "*" [package.source] type = "git" url = "https://github.com/sartography/SpiffWorkflow" -reference = "main" -resolved_reference = "f162aac43af3af18d1a55186aeccea154fb8b05d" +reference = "6cad2981712bb61eca23af1adfafce02d3277cb9" +resolved_reference = "6cad2981712bb61eca23af1adfafce02d3277cb9" [[package]] name = "SQLAlchemy" @@ -2274,7 +2274,7 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools" [metadata] lock-version = "1.1" python-versions = ">=3.9,<3.12" -content-hash = "b9ea32912509637f1378d060771de7548d93953aa3db12d6a48098f7dc15205f" +content-hash = "253dc24203f175f363158329b0303c11044bc1bb400b17189658251cb37029f7" [metadata.files] alabaster = [ diff --git a/spiffworkflow-backend/pyproject.toml b/spiffworkflow-backend/pyproject.toml index 87f3a5d5..37bfc09b 100644 --- a/spiffworkflow-backend/pyproject.toml +++ b/spiffworkflow-backend/pyproject.toml @@ -27,7 +27,8 @@ flask-marshmallow = "*" flask-migrate = "*" flask-restful = "*" werkzeug = "*" -SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"} +# SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "f162aac43af3af18d1a55186aeccea154fb8b05d"} +SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "6cad2981712bb61eca23af1adfafce02d3277cb9"} # SpiffWorkflow = {develop = true, path = "../SpiffWorkflow" } sentry-sdk = "^1.10" sphinx-autoapi = "^2.0" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index b89e0e2d..7276165c 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -225,6 +225,7 @@ class NonTaskDataBasedScriptEngineEnvironment(BasePythonScriptEngineEnvironment) ) -> None: # TODO: once integrated look at the tests that fail without Box # context is task.data + # import pdb; pdb.set_trace() Box.convert_to_box(context) self.state.update(self.globals) self.state.update(external_methods or {}) @@ -234,16 +235,20 @@ class NonTaskDataBasedScriptEngineEnvironment(BasePythonScriptEngineEnvironment) finally: # since the task data is not directly mutated when the script executes, need to determine which keys # have been deleted from the environment and remove them from task data if present. 
- context_keys_to_drop = context.keys() - self.state.keys() # import pdb; pdb.set_trace() + context_keys_to_drop = context.keys() - self.state.keys() + # import pdb; pdb.set_trace() for key_to_drop in context_keys_to_drop: context.pop(key_to_drop) + # import pdb; pdb.set_trace() self.state = self.user_defined_state(external_methods) + # import pdb; pdb.set_trace() # the task data needs to be updated with the current state so data references can be resolved properly. # the state will be removed later once the task is completed. + import pdb; pdb.set_trace() context.update(self.state) def user_defined_state(self, external_methods: Optional[Dict[str, Any]] = None) -> Dict[str, Any]: @@ -1038,8 +1043,8 @@ class ProcessInstanceProcessor: Expects the save method to commit it. """ - # if self.process_instance_model.bpmn_process_definition_id is not None: - # return None + if self.process_instance_model.bpmn_process_definition_id is not None: + return None # we may have to already process bpmn_definitions if we ever care about the Root task again bpmn_dict = self.serialize() @@ -1052,40 +1057,40 @@ class ProcessInstanceProcessor: else: process_instance_data_dict[bpmn_key] = bpmn_dict[bpmn_key] - # if self.process_instance_model.bpmn_process_definition_id is None: + # if self.process_instance_model.bpmn_process_definition_id is None: self._add_bpmn_process_definitions(bpmn_spec_dict) - subprocesses = process_instance_data_dict.pop("subprocesses") - bpmn_process_parent, new_task_models, new_json_data_dicts = TaskService.add_bpmn_process( - bpmn_process_dict=process_instance_data_dict, - process_instance=self.process_instance_model, - bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, - spiff_workflow=self.bpmn_process_instance, - serializer=self._serializer, - ) - for subprocess_task_id, subprocess_properties in subprocesses.items(): - ( - _bpmn_subprocess, - subprocess_new_task_models, - subprocess_new_json_data_models, - ) = TaskService.add_bpmn_process( - bpmn_process_dict=subprocess_properties, - process_instance=self.process_instance_model, - bpmn_process_parent=bpmn_process_parent, - bpmn_process_guid=subprocess_task_id, - bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, - spiff_workflow=self.bpmn_process_instance, - serializer=self._serializer, - ) - new_task_models.update(subprocess_new_task_models) - new_json_data_dicts.update(subprocess_new_json_data_models) - db.session.bulk_save_objects(new_task_models.values()) - - TaskService.insert_or_update_json_data_records(new_json_data_dicts) + # subprocesses = process_instance_data_dict.pop("subprocesses") + # bpmn_process_parent, new_task_models, new_json_data_dicts = TaskService.add_bpmn_process( + # bpmn_process_dict=process_instance_data_dict, + # process_instance=self.process_instance_model, + # bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, 
bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, + # spiff_workflow=self.bpmn_process_instance, + # serializer=self._serializer, + # ) + # new_task_models.update(subprocess_new_task_models) + # new_json_data_dicts.update(subprocess_new_json_data_models) + # db.session.bulk_save_objects(new_task_models.values()) + # + # TaskService.insert_or_update_json_data_records(new_json_data_dicts) def save(self) -> None: """Saves the current state of this processor to the database.""" - self._add_bpmn_json_records() + # self._add_bpmn_json_records() self.process_instance_model.spiff_serializer_version = self.SERIALIZER_VERSION complete_states = [TaskState.CANCELLED, TaskState.COMPLETED] @@ -1546,7 +1551,7 @@ class ProcessInstanceProcessor: self._script_engine.environment.revise_state_with_task_data(task) return self.spiff_step_details_mapping(task, start, end) - # self._add_bpmn_json_records() + self._add_bpmn_json_records() step_delegate = StepDetailLoggingDelegate(self.increment_spiff_step, spiff_step_details_mapping_builder) task_model_delegate = TaskModelSavingDelegate( diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py index 82dd1dc7..6b9f226f 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py @@ -77,8 +77,8 @@ class TaskModelSavingDelegate(EngineStepDelegate): Use the bpmn_process_id to do this. """ - return self.process_instance.bpmn_process_id is not None - # return True + # return self.process_instance.bpmn_process_id is not None + return True def _update_json_data_dicts_using_list(self, json_data_dict_list: list[Optional[JsonDataDict]]) -> None: for json_data_dict in json_data_dict_list: @@ -106,9 +106,9 @@ class TaskModelSavingDelegate(EngineStepDelegate): def did_complete_task(self, spiff_task: SpiffTask) -> None: # if self.current_task_model and self.should_update_task_model(): if self.should_update_task_model(): - # if spiff_task.task_spec.name == 'top_level_script': - # import pdb; pdb.set_trace() - spiff_task.workflow.script_engine.environment.revise_state_with_task_data(spiff_task) + if spiff_task.task_spec.name == 'top_level_script': + import pdb; pdb.set_trace() + # spiff_task.workflow.script_engine.environment.revise_state_with_task_data(spiff_task) _bpmn_process, task_model, new_task_models, new_json_data_dicts = ( TaskService.find_or_create_task_model_from_spiff_task( spiff_task, From eef40bf22f6cde155c944c15b93251b0f2877a6c Mon Sep 17 00:00:00 2001 From: Dan Date: Thu, 16 Mar 2023 11:07:56 -0400 Subject: [PATCH 028/162] don't overwrite the current user variable (passed as an argument to the function) with the initiator; these values should be kept separate.
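A minimal sketch of the pitfall (the wrapping function name here is hypothetical): rebinding a parameter to the result of a lookup silently changes what the rest of the function sees.

    def apply_initiator_filter(process_instance_query, user, process_initiator_username):
        # BAD: "user = UserModel.query...." would discard the caller's user
        initiator = UserModel.query.filter_by(username=process_initiator_username).first()
        # fall back to an id that matches no one when the username is unknown
        process_initiator_id = initiator.id if initiator else -1
        return process_instance_query.filter_by(process_initiator_id=process_initiator_id)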
--- .../services/process_instance_report_service.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py index 62f7c993..3de0319e 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py @@ -404,10 +404,10 @@ class ProcessInstanceReportService: ) if report_filter.process_initiator_username is not None: - user = UserModel.query.filter_by(username=report_filter.process_initiator_username).first() + initiator = UserModel.query.filter_by(username=report_filter.process_initiator_username).first() process_initiator_id = -1 - if user: - process_initiator_id = user.id + if initiator: + process_initiator_id = initiator.id process_instance_query = process_instance_query.filter_by(process_initiator_id=process_initiator_id) if ( From bf5b23a52f3166972556d896748784e785ae944f Mon Sep 17 00:00:00 2001 From: jasquat Date: Thu, 16 Mar 2023 11:17:21 -0400 Subject: [PATCH 029/162] use box script engine to save everything into task data for time being w/ burnettk --- .../services/process_instance_processor.py | 7 ++++++- .../unit/test_process_instance_processor.py | 3 ++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 6c8b64fc..972f5d4f 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -285,7 +285,12 @@ class NonTaskDataBasedScriptEngineEnvironment(BasePythonScriptEngineEnvironment) self.state[result_variable] = task.data.pop(result_variable) -class CustomScriptEngineEnvironment(NonTaskDataBasedScriptEngineEnvironment): +# SpiffWorkflow at revision f162aac43af3af18d1a55186aeccea154fb8b05d runs script tasks on ready +# which means that our will_complete_task hook does not have the correct env state when it runs +# so save everything to task data for now until we can figure out a better way to hook into that. +# Revision 6cad2981712bb61eca23af1adfafce02d3277cb9 is the last revision that can run with this. 
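+# (the Boxed environment below keeps the script state in task.data itself, so the data is already in place when hooks like will_complete_task run)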
+# class CustomScriptEngineEnvironment(NonTaskDataBasedScriptEngineEnvironment): +class CustomScriptEngineEnvironment(BoxedTaskDataBasedScriptEngineEnvironment): pass diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index a5faf545..486abad7 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -343,7 +343,8 @@ class TestProcessInstanceProcessor(BaseTest): task_definition = task.task_definition assert task_definition.bpmn_identifier == spiff_task_name assert task_definition.bpmn_process_definition.bpmn_identifier == bpmn_process_identifier - assert task.python_env_data() == expected_python_env_data + # assert task.python_env_data() == expected_python_env_data + assert task.json_data() == expected_python_env_data spiff_tasks_checked_once.append(spiff_task.task_spec.name) all_spiff_tasks = processor_final.bpmn_process_instance.get_tasks() From 85b0ac32b931475bd4f65c34d47ceb40bc6c59a5 Mon Sep 17 00:00:00 2001 From: jasquat Date: Thu, 16 Mar 2023 11:31:43 -0400 Subject: [PATCH 030/162] build image for save to task data branch w/ burnettk --- .github/workflows/docker_image_for_main_builds.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docker_image_for_main_builds.yml b/.github/workflows/docker_image_for_main_builds.yml index 47567d56..6495c273 100644 --- a/.github/workflows/docker_image_for_main_builds.yml +++ b/.github/workflows/docker_image_for_main_builds.yml @@ -31,7 +31,7 @@ on: push: branches: - main - - feature/move_task_data_into_tables + - feature/save_to_task_data jobs: create_frontend_docker_image: From 39c9c1ba09072ebd1f06b5b6f6853cad86ef77f6 Mon Sep 17 00:00:00 2001 From: Dan Date: Thu, 16 Mar 2023 11:54:09 -0400 Subject: [PATCH 031/162] avoid the flicker when switching between detailed view and normal view by just clearing out the data before making a new request. The vast majority of the delay is not in the api call, but in rendering the data - and at just a split second, it isn't long enough for a spinner to be very effective. --- spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx index 413a7d35..5c1803f7 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx @@ -29,6 +29,11 @@ export default function ProcessInstanceLogList({ variant }: OwnProps) { } useEffect(() => { + // Clear out any previous results to avoid a "flicker" effect where columns + // are updated above the incorrect data. 
+ setProcessInstanceLogs([]); + setPagination(null); + const setProcessInstanceLogListFromResult = (result: any) => { setProcessInstanceLogs(result.results); setPagination(result.pagination); From bcf798f299b590a6c8bb2d3993a359c659666025 Mon Sep 17 00:00:00 2001 From: burnettk Date: Thu, 16 Mar 2023 12:34:44 -0400 Subject: [PATCH 032/162] fix two issues, one where we were not sorting after globbing, and another where we forgot about process groups --- .../tests/spiffworkflow_backend/helpers/example_data.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/example_data.py b/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/example_data.py index f0960185..235d62ff 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/example_data.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/example_data.py @@ -41,7 +41,7 @@ class ExampleDataLoader: bpmn_file_name_with_extension = bpmn_file_name if not bpmn_file_name_with_extension: - bpmn_file_name_with_extension = process_model_id + bpmn_file_name_with_extension = os.path.basename(process_model_id) if not bpmn_file_name_with_extension.endswith(".bpmn"): bpmn_file_name_with_extension += ".bpmn" @@ -65,7 +65,7 @@ class ExampleDataLoader: file_name_matcher, ) - files = glob.glob(file_glob) + files = sorted(glob.glob(file_glob)) for file_path in files: if os.path.isdir(file_path): continue # Don't try to process sub directories From 72eefe1de90a4cf1ebbd06bc694f3a5261402fcc Mon Sep 17 00:00:00 2001 From: jbirddog <100367399+jbirddog@users.noreply.github.com> Date: Thu, 16 Mar 2023 14:38:00 -0400 Subject: [PATCH 033/162] Retry locked user input submissions (#185) --- .../spiffworkflow_backend/config/default.py | 8 +++++++ .../routes/tasks_controller.py | 7 +++++- .../services/process_instance_processor.py | 24 +++++++++++++++---- 3 files changed, 33 insertions(+), 6 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py index ca808564..2af3e7df 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py @@ -129,5 +129,13 @@ SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_WEB = environ.get( "SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_WEB", default="greedy" ) +SPIFFWORKFLOW_BACKEND_USER_INPUT_REQUIRED_LOCK_RETRY_TIMES = int( + environ.get("SPIFFWORKFLOW_BACKEND_USER_INPUT_REQUIRED_LOCK_RETRY_TIMES", default="3") +) + +SPIFFWORKFLOW_BACKEND_USER_INPUT_REQUIRED_LOCK_RETRY_INTERVAL_IN_SECONDS = int( + environ.get("SPIFFWORKFLOW_BACKEND_USER_INPUT_REQUIRED_LOCK_RETRY_INTERVAL_IN_SECONDS", default="1") +) + # this is only used in CI. 
use SPIFFWORKFLOW_BACKEND_DATABASE_URI instead for real configuration SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD = environ.get("SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD", default=None) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py index 7ca7c6eb..ad9868e6 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py @@ -378,8 +378,13 @@ def task_submit_shared( only_tasks_that_can_be_completed=True, ) + retry_times = current_app.config["SPIFFWORKFLOW_BACKEND_USER_INPUT_REQUIRED_LOCK_RETRY_TIMES"] + retry_interval_in_seconds = current_app.config[ + "SPIFFWORKFLOW_BACKEND_USER_INPUT_REQUIRED_LOCK_RETRY_INTERVAL_IN_SECONDS" + ] + with sentry_sdk.start_span(op="task", description="complete_form_task"): - processor.lock_process_instance("Web") + processor.lock_process_instance("Web", retry_times, retry_interval_in_seconds) ProcessInstanceService.complete_form_task( processor=processor, spiff_task=spiff_task, diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 6c8b64fc..5e771c12 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -91,9 +91,8 @@ from spiffworkflow_backend.services.file_system_service import FileSystemService from spiffworkflow_backend.services.process_instance_lock_service import ( ProcessInstanceLockService, ) -from spiffworkflow_backend.services.process_instance_queue_service import ( - ProcessInstanceQueueService, -) +from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceIsAlreadyLockedError +from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceQueueService from spiffworkflow_backend.services.process_model_service import ProcessModelService from spiffworkflow_backend.services.service_task_service import ServiceTaskDelegate from spiffworkflow_backend.services.spec_file_service import SpecFileService @@ -1459,8 +1458,23 @@ class ProcessInstanceProcessor: return the_status # TODO: replace with implicit/more granular locking in workflow execution service - def lock_process_instance(self, lock_prefix: str) -> None: - ProcessInstanceQueueService.dequeue(self.process_instance_model) + # TODO: remove the retry logic once all user_input_required's don't need to be locked to check timers + def lock_process_instance( + self, lock_prefix: str, retry_count: int = 0, retry_interval_in_seconds: int = 0 + ) -> None: + try: + ProcessInstanceQueueService.dequeue(self.process_instance_model) + except ProcessInstanceIsAlreadyLockedError as e: + if retry_count > 0: + current_app.logger.info( + f"process_instance_id {self.process_instance_model.id} is locked. " + f"will retry {retry_count} times with delay of {retry_interval_in_seconds}." 
+ ) + if retry_interval_in_seconds > 0: + time.sleep(retry_interval_in_seconds) + self.lock_process_instance(lock_prefix, retry_count - 1, retry_interval_in_seconds) + else: + raise e # TODO: replace with implicit/more granular locking in workflow execution service def unlock_process_instance(self, lock_prefix: str) -> None: From 8dc178b95373337728d9f1794266ffa6bdb98624 Mon Sep 17 00:00:00 2001 From: jasquat Date: Thu, 16 Mar 2023 16:14:41 -0400 Subject: [PATCH 034/162] tests are now passing w/ burnettk --- .../src/spiffworkflow_backend/models/task.py | 1 + .../services/process_instance_processor.py | 16 ++- .../services/task_service.py | 23 +-- .../services/workflow_execution_service.py | 133 ++++++++---------- .../integration/test_process_api.py | 4 +- .../unit/test_process_instance_processor.py | 36 ++++- 6 files changed, 126 insertions(+), 87 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py index 5bee3b39..95947723 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py @@ -50,6 +50,7 @@ class TaskModel(SpiffworkflowBaseDBModel): id: int = db.Column(db.Integer, primary_key=True) guid: str = db.Column(db.String(36), nullable=False, unique=True, index=True) bpmn_process_id: int = db.Column(ForeignKey(BpmnProcessModel.id), nullable=False) # type: ignore + bpmn_process = relationship(BpmnProcessModel) process_instance_id: int = db.Column(ForeignKey("process_instance.id"), nullable=False) # find this by looking up the "workflow_name" and "task_spec" from the properties_json diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 3f00452d..c4bc4856 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -326,6 +326,10 @@ class CustomBpmnScriptEngine(PythonScriptEngine): # type: ignore environment = CustomScriptEngineEnvironment(default_globals) + # right now spiff is executing script tasks on ready so doing this + # so we know when something fails and we can save it to our database. 
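+ # (execute() records the failing task here on WorkflowException so the save path can persist its state before the error propagates)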
+ self.failing_spiff_task: Optional[SpiffTask] = None + super().__init__(environment=environment) def __get_augment_methods(self, task: Optional[SpiffTask]) -> Dict[str, Callable]: @@ -385,11 +389,14 @@ class CustomBpmnScriptEngine(PythonScriptEngine): # type: ignore def execute(self, task: SpiffTask, script: str, external_methods: Any = None) -> None: """Execute.""" try: + # reset failing task just in case + self.failing_spiff_task = None methods = self.__get_augment_methods(task) if external_methods: methods.update(external_methods) super().execute(task, script, methods) except WorkflowException as e: + self.failing_spiff_task = task raise e except Exception as e: raise self.create_task_exec_exception(task, script, e) from e @@ -1558,7 +1565,6 @@ class ProcessInstanceProcessor: serializer=self._serializer, process_instance=self.process_instance_model, bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, - # script_engine=self._script_engine, ) if execution_strategy_name is None: @@ -1572,7 +1578,13 @@ class ProcessInstanceProcessor: self._script_engine.environment.finalize_result, self.save, ) - execution_service.do_engine_steps(exit_at, save) + try: + execution_service.do_engine_steps(exit_at, save) + finally: + # clear out failing spiff tasks here since the ProcessInstanceProcessor creates an instance of the + # script engine on a class variable. + if hasattr(self._script_engine, "failing_spiff_task") and self._script_engine.failing_spiff_task is not None: + self._script_engine.failing_spiff_task = None # log the spiff step details so we know what is processing the process # instance when a human task has a timer event. diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index 6368eb82..a83a60c3 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -202,14 +202,9 @@ class TaskService: bpmn_process.properties_json = bpmn_process_dict - bpmn_process_data_json = json.dumps(bpmn_process_data_dict, sort_keys=True) - bpmn_process_data_hash = sha256(bpmn_process_data_json.encode("utf8")).hexdigest() - if bpmn_process.json_data_hash != bpmn_process_data_hash: - new_json_data_dicts[bpmn_process_data_hash] = { - "hash": bpmn_process_data_hash, - "data": bpmn_process_data_dict, - } - bpmn_process.json_data_hash = bpmn_process_data_hash + bpmn_process_json_data = cls.update_task_data_on_bpmn_process(bpmn_process, bpmn_process_data_dict) + if bpmn_process_json_data is not None: + new_json_data_dicts[bpmn_process_json_data['hash']] = bpmn_process_json_data if bpmn_process_parent is None: process_instance.bpmn_process = bpmn_process @@ -261,6 +256,18 @@ class TaskService: return (bpmn_process, new_task_models, new_json_data_dicts) + @classmethod + def update_task_data_on_bpmn_process( + cls, bpmn_process: BpmnProcessModel, bpmn_process_data_dict: dict + ) -> Optional[JsonDataDict]: + bpmn_process_data_json = json.dumps(bpmn_process_data_dict, sort_keys=True) + bpmn_process_data_hash: str = sha256(bpmn_process_data_json.encode("utf8")).hexdigest() + json_data_dict: Optional[JsonDataDict] = None + if bpmn_process.json_data_hash != bpmn_process_data_hash: + json_data_dict = {"hash": bpmn_process_data_hash, "data": bpmn_process_data_dict} + bpmn_process.json_data_hash = bpmn_process_data_hash + return json_data_dict + @classmethod def
_update_task_data_on_task_model( cls, task_model: TaskModel, task_data_dict: dict, task_model_data_column: str diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py index 19710ea8..3f735901 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py @@ -36,7 +36,7 @@ class EngineStepDelegate: def did_complete_task(self, spiff_task: SpiffTask) -> None: pass - def save(self, commit: bool = False) -> None: + def save(self, bpmn_process_instance: BpmnWorkflow, commit: bool = False) -> None: pass def after_engine_steps(self, bpmn_process_instance: BpmnWorkflow) -> None: @@ -58,13 +58,11 @@ class TaskModelSavingDelegate(EngineStepDelegate): serializer: BpmnWorkflowSerializer, process_instance: ProcessInstanceModel, bpmn_definition_to_task_definitions_mappings: dict, - # script_engine, secondary_engine_step_delegate: Optional[EngineStepDelegate] = None, ) -> None: self.secondary_engine_step_delegate = secondary_engine_step_delegate self.process_instance = process_instance self.bpmn_definition_to_task_definitions_mappings = bpmn_definition_to_task_definitions_mappings - # self.script_engine = script_engine self.current_task_model: Optional[TaskModel] = None self.current_task_start_in_seconds: Optional[float] = None @@ -72,7 +70,41 @@ class TaskModelSavingDelegate(EngineStepDelegate): self.json_data_dicts: dict[str, JsonDataDict] = {} self.serializer = serializer - def should_update_task_model(self) -> bool: + def will_complete_task(self, spiff_task: SpiffTask) -> None: + if self._should_update_task_model(): + self.current_task_start_in_seconds = time.time() + if self.secondary_engine_step_delegate: + self.secondary_engine_step_delegate.will_complete_task(spiff_task) + + def did_complete_task(self, spiff_task: SpiffTask) -> None: + if self._should_update_task_model(): + self._update_task_model_with_spiff_task(spiff_task) + if self.secondary_engine_step_delegate: + self.secondary_engine_step_delegate.did_complete_task(spiff_task) + + def save(self, bpmn_process_instance: BpmnWorkflow, _commit: bool = True) -> None: + script_engine = bpmn_process_instance.script_engine + if hasattr(script_engine, "failing_spiff_task") and script_engine.failing_spiff_task is not None: + failing_spiff_task = script_engine.failing_spiff_task + self._update_task_model_with_spiff_task(failing_spiff_task) + + db.session.bulk_save_objects(self.task_models.values()) + + TaskService.insert_or_update_json_data_records(self.json_data_dicts) + + if self.secondary_engine_step_delegate: + self.secondary_engine_step_delegate.save(bpmn_process_instance, commit=False) + db.session.commit() + + def after_engine_steps(self, bpmn_process_instance: BpmnWorkflow) -> None: + if self._should_update_task_model(): + # excludes FUTURE and COMPLETED. the others were required to get PP1 to go to completion. + for waiting_spiff_task in bpmn_process_instance.get_tasks( + TaskState.WAITING | TaskState.CANCELLED | TaskState.READY | TaskState.MAYBE | TaskState.LIKELY + ): + self._update_task_model_with_spiff_task(waiting_spiff_task) + + def _should_update_task_model(self) -> bool: """We need to figure out if we have previously saved task info on this process instance. Use the bpmn_process_id to do this.
@@ -85,76 +117,23 @@ class TaskModelSavingDelegate(EngineStepDelegate): if json_data_dict is not None: self.json_data_dicts[json_data_dict["hash"]] = json_data_dict - def will_complete_task(self, spiff_task: SpiffTask) -> None: - if self.should_update_task_model(): - # _bpmn_process, task_model, new_task_models, new_json_data_dicts = ( - # TaskService.find_or_create_task_model_from_spiff_task( - # spiff_task, - # self.process_instance, - # self.serializer, - # bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, - # ) - # ) - # self.current_task_model = task_model - # self.task_models.update(new_task_models) - # self.json_data_dicts.update(new_json_data_dicts) - # self.current_task_model.start_in_seconds = time.time() - self.current_task_start_in_seconds = time.time() - if self.secondary_engine_step_delegate: - self.secondary_engine_step_delegate.will_complete_task(spiff_task) - - def did_complete_task(self, spiff_task: SpiffTask) -> None: - # if self.current_task_model and self.should_update_task_model(): - if self.should_update_task_model(): - # if spiff_task.task_spec.name == 'top_level_script': - # import pdb; pdb.set_trace() - # spiff_task.workflow.script_engine.environment.revise_state_with_task_data(spiff_task) - _bpmn_process, task_model, new_task_models, new_json_data_dicts = ( - TaskService.find_or_create_task_model_from_spiff_task( - spiff_task, - self.process_instance, - self.serializer, - bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, - ) + def _update_task_model_with_spiff_task(self, spiff_task: SpiffTask) -> None: + bpmn_process, task_model, new_task_models, new_json_data_dicts = ( + TaskService.find_or_create_task_model_from_spiff_task( + spiff_task, + self.process_instance, + self.serializer, + bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, ) - task_model.start_in_seconds = self.current_task_start_in_seconds or time.time() - task_model.end_in_seconds = time.time() - json_data_dict_list = TaskService.update_task_model(task_model, spiff_task, self.serializer) - self._update_json_data_dicts_using_list(json_data_dict_list) - self.task_models.update(new_task_models) - self.json_data_dicts.update(new_json_data_dicts) - self.task_models[task_model.guid] = task_model - if self.secondary_engine_step_delegate: - self.secondary_engine_step_delegate.did_complete_task(spiff_task) - - def save(self, _commit: bool = True) -> None: - db.session.bulk_save_objects(self.task_models.values()) - - TaskService.insert_or_update_json_data_records(self.json_data_dicts) - - if self.secondary_engine_step_delegate: - self.secondary_engine_step_delegate.save(commit=False) - db.session.commit() - - def after_engine_steps(self, bpmn_process_instance: BpmnWorkflow) -> None: - if self.should_update_task_model(): - # excludes FUTURE and COMPLETED. the others were required to get PP1 to go to completion. 
- for waiting_spiff_task in bpmn_process_instance.get_tasks( - TaskState.WAITING | TaskState.CANCELLED | TaskState.READY | TaskState.MAYBE | TaskState.LIKELY - ): - _bpmn_process, task_model, new_task_models, new_json_data_dicts = ( - TaskService.find_or_create_task_model_from_spiff_task( - waiting_spiff_task, - self.process_instance, - self.serializer, - bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, - ) - ) - self.task_models.update(new_task_models) - self.json_data_dicts.update(new_json_data_dicts) - json_data_dict_list = TaskService.update_task_model(task_model, waiting_spiff_task, self.serializer) - self.task_models[task_model.guid] = task_model - self._update_json_data_dicts_using_list(json_data_dict_list) + ) + bpmn_process_json_data = TaskService.update_task_data_on_bpmn_process(bpmn_process or task_model.bpmn_process, spiff_task.workflow.data) + self.task_models.update(new_task_models) + self.json_data_dicts.update(new_json_data_dicts) + json_data_dict_list = TaskService.update_task_model(task_model, spiff_task, self.serializer) + self.task_models[task_model.guid] = task_model + if bpmn_process_json_data is not None: + json_data_dict_list.append(bpmn_process_json_data) + self._update_json_data_dicts_using_list(json_data_dict_list) class StepDetailLoggingDelegate(EngineStepDelegate): @@ -203,7 +182,7 @@ class StepDetailLoggingDelegate(EngineStepDelegate): self.spiff_step_details_mapping(spiff_task, self.current_task_start_in_seconds, time.time()) ) - def save(self, commit: bool = True) -> None: + def save(self, _bpmn_process_instance: BpmnWorkflow, commit: bool = True) -> None: db.session.bulk_insert_mappings(SpiffStepDetailsModel, self.step_details) if commit: db.session.commit() @@ -215,18 +194,20 @@ class ExecutionStrategy: def __init__(self, delegate: EngineStepDelegate): """__init__.""" self.delegate = delegate + self.bpmn_process_instance = None def do_engine_steps(self, bpmn_process_instance: BpmnWorkflow, exit_at: None = None) -> None: pass def save(self) -> None: - self.delegate.save() + self.delegate.save(self.bpmn_process_instance) class GreedyExecutionStrategy(ExecutionStrategy): """The common execution strategy. 
This will greedily run all engine steps without stopping.""" def do_engine_steps(self, bpmn_process_instance: BpmnWorkflow, exit_at: None = None) -> None: + self.bpmn_process_instance = bpmn_process_instance bpmn_process_instance.do_engine_steps( exit_at=exit_at, will_complete_task=self.delegate.will_complete_task, @@ -243,6 +224,7 @@ class RunUntilServiceTaskExecutionStrategy(ExecutionStrategy): """ def do_engine_steps(self, bpmn_process_instance: BpmnWorkflow, exit_at: None = None) -> None: + self.bpmn_process_instance = bpmn_process_instance engine_steps = list( [ t @@ -310,6 +292,7 @@ class WorkflowExecutionService: ) try: + # import pdb; pdb.set_trace() self.bpmn_process_instance.refresh_waiting_tasks() # TODO: implicit re-entrant locks here `with_dequeued` diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index 086841c0..3d7b729b 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -1,5 +1,6 @@ """Test Process Api Blueprint.""" import io +from SpiffWorkflow.task import TaskState import json import os import time @@ -2067,7 +2068,7 @@ class TestProcessApi(BaseTest): # TODO: make sure the system notification process is run on exceptions ... - def test_task_data_is_set_even_if_process_instance_errors( + def test_task_data_is_set_even_if_process_instance_errors_through_the_api( self, app: Flask, client: FlaskClient, @@ -2093,6 +2094,7 @@ class TestProcessApi(BaseTest): processor = ProcessInstanceProcessor(process_instance) spiff_task = processor.get_task_by_bpmn_identifier("script_task_two", processor.bpmn_process_instance) assert spiff_task is not None + assert spiff_task.state == TaskState.WAITING assert spiff_task.data == {"my_var": "THE VAR"} def test_process_model_file_create( diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index 00aac15e..bf244a64 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -5,7 +5,8 @@ import pytest from flask import g from flask.app import Flask from flask.testing import FlaskClient -from SpiffWorkflow.task import TaskState # type: ignore +from SpiffWorkflow.task import TaskState +from spiffworkflow_backend.exceptions.api_error import ApiError # type: ignore from tests.spiffworkflow_backend.helpers.base_test import BaseTest from tests.spiffworkflow_backend.helpers.test_data import load_test_spec @@ -318,6 +319,7 @@ class TestProcessInstanceProcessor(BaseTest): **{"set_in_test_process_to_call_script": 1}, } fourth_data_set = {**third_data_set, **{"a": 1, "we_move_on": True}} + fifth_data_set = {**fourth_data_set, **{'validate_only': False, 'set_top_level_process_script_after_gate': 1}} expected_task_data = { "top_level_script": first_data_set, "manual_task": first_data_set, @@ -345,6 +347,7 @@ class TestProcessInstanceProcessor(BaseTest): assert task_definition.bpmn_identifier == spiff_task_name assert task_definition.bpmn_process_definition.bpmn_identifier == bpmn_process_identifier message = f"{base_failure_message} Expected: {expected_python_env_data}. 
Received: {task.json_data()}" + # TODO: if we split out env data again we will need to use it here instead of json_data # assert task.python_env_data() == expected_python_env_data, message assert task.json_data() == expected_python_env_data, message spiff_tasks_checked_once.append(spiff_task.task_spec.name) @@ -357,6 +360,8 @@ class TestProcessInstanceProcessor(BaseTest): assert_spiff_task_is_in_process("top_level_subprocess_script", "top_level_subprocess") assert_spiff_task_is_in_process("top_level_script", "top_level_process") + assert processor.get_data() == fifth_data_set + def test_does_not_recreate_human_tasks_on_multiple_saves( self, app: Flask, @@ -469,3 +474,32 @@ class TestProcessInstanceProcessor(BaseTest): # EDIT: when using feature/remove-loop-reset branch of SpiffWorkflow, these should be different. assert human_task_two.task_id != human_task_one.task_id + + def test_task_data_is_set_even_if_process_instance_errors( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + """Test_task_data_is_set_even_if_process_instance_errors.""" + process_model = load_test_spec( + process_model_id="group/error_with_task_data", + bpmn_file_name="script_error_with_task_data.bpmn", + process_model_source_directory="error", + ) + process_instance = self.create_process_instance_from_process_model( + process_model=process_model, user=with_super_admin_user + ) + + processor = ProcessInstanceProcessor(process_instance) + with pytest.raises(ApiError): + processor.do_engine_steps(save=True) + + process_instance_final = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() + processor_final = ProcessInstanceProcessor(process_instance_final) + + spiff_task = processor_final.get_task_by_bpmn_identifier("script_task_two", processor_final.bpmn_process_instance) + assert spiff_task is not None + assert spiff_task.state == TaskState.WAITING + assert spiff_task.data == {"my_var": "THE VAR"} From a0a497d981bd8508a5867a830a2466ca38ff0fd4 Mon Sep 17 00:00:00 2001 From: burnettk Date: Thu, 16 Mar 2023 16:16:02 -0400 Subject: [PATCH 035/162] add finance.project-lead user --- .../realm_exports/spiffworkflow-realm.json | 71 ++++++++++++------- .../keycloak/test_user_lists/status | 1 + 2 files changed, 48 insertions(+), 24 deletions(-) diff --git a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json index 1c57944b..d44353b8 100644 --- a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json +++ b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json @@ -1505,6 +1505,29 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] + }, { + "id" : "d123d384-66a4-4db5-9dbb-d73c12047001", + "createdTimestamp" : 1678997616280, + "username" : "finance.project-lead", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "finance.project-lead@status.im", + "attributes" : { + "spiffworkflow-employeeid" : [ "128" ] + }, + "credentials" : [ { + "id" : "b680f5c5-c2de-4255-9d23-7e18cff3ac4e", + "type" : "password", + "createdDate" : 1678997616336, + "secretData" : "{\"value\":\"4kasmb11Sv62rInh8eFUhS3rGYNymzsvxzfsEIWGYhnlisYuo1iTS2opv/kET/NyJlsYrfwc7yrIqSHvkUHkkA==\",\"salt\":\"q/ees3a4K+3K11olnfPzCQ==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + 
"disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] }, { "id" : "f6d2488a-446c-493b-bbe8-210ede6f3e42", "createdTimestamp" : 1674148694899, @@ -4578,7 +4601,7 @@ "subType" : "authenticated", "subComponents" : { }, "config" : { - "allowed-protocol-mapper-types" : [ "oidc-usermodel-property-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-role-list-mapper", "oidc-full-name-mapper", "saml-user-attribute-mapper", "oidc-usermodel-attribute-mapper", "saml-user-property-mapper", "oidc-address-mapper" ] + "allowed-protocol-mapper-types" : [ "saml-role-list-mapper", "oidc-address-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-user-attribute-mapper", "oidc-usermodel-attribute-mapper", "saml-user-property-mapper", "oidc-usermodel-property-mapper", "oidc-full-name-mapper" ] } }, { "id" : "d68e938d-dde6-47d9-bdc8-8e8523eb08cd", @@ -4596,7 +4619,7 @@ "subType" : "anonymous", "subComponents" : { }, "config" : { - "allowed-protocol-mapper-types" : [ "saml-user-attribute-mapper", "oidc-usermodel-attribute-mapper", "oidc-address-mapper", "oidc-usermodel-property-mapper", "oidc-full-name-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-role-list-mapper", "saml-user-property-mapper" ] + "allowed-protocol-mapper-types" : [ "oidc-usermodel-attribute-mapper", "oidc-full-name-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-property-mapper", "saml-user-property-mapper", "oidc-address-mapper", "saml-user-attribute-mapper", "saml-role-list-mapper" ] } }, { "id" : "3854361d-3fe5-47fb-9417-a99592e3dc5c", @@ -4686,7 +4709,7 @@ "internationalizationEnabled" : false, "supportedLocales" : [ ], "authenticationFlows" : [ { - "id" : "04b09640-f53c-4c1b-b2b1-8cac25afc2bb", + "id" : "c54f2b16-9254-481a-9997-fb6cafaa2c00", "alias" : "Account verification options", "description" : "Method with which to verity the existing account", "providerId" : "basic-flow", @@ -4708,7 +4731,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "e7c246f4-71c3-4a48-9037-72438bdcfcbb", + "id" : "eae97d77-649e-4475-a0a3-57fea93a6b5a", "alias" : "Authentication Options", "description" : "Authentication options.", "providerId" : "basic-flow", @@ -4737,7 +4760,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "6e9d415e-98f7-4459-b10b-45b08302c681", + "id" : "1fe6063d-c996-44ae-a082-c11d35b4f9ff", "alias" : "Browser - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -4759,7 +4782,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "c86b0fad-f7dd-4c58-974e-25eb83c1dacf", + "id" : "61f58306-7a2f-46ad-994f-04b5eb2a8146", "alias" : "Direct Grant - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -4781,7 +4804,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "cb7f4c87-a8fa-445a-a8d4-53869cdfed12", + "id" : "3b4f8b2f-cf0f-45d8-9105-65b1b3d088d5", "alias" : "First broker login - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -4803,7 +4826,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "8fa87954-bc65-4f1e-bc55-f5bb49f59fbb", + "id" : "211cd18a-4f93-4b60-ba6f-ae55860a0dbc", "alias" : "Handle Existing Account", "description" : "Handle what to do if there is existing account with same email/username like authenticated identity provider", "providerId" : "basic-flow", @@ -4825,7 +4848,7 @@ 
"userSetupAllowed" : false } ] }, { - "id" : "e617d826-c654-4c35-96ad-8381bd1e2298", + "id" : "7c1d5fb9-14f1-4603-bfec-449f8d98e1ea", "alias" : "Reset - Conditional OTP", "description" : "Flow to determine if the OTP should be reset or not. Set to REQUIRED to force.", "providerId" : "basic-flow", @@ -4847,7 +4870,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "2e4a46ae-2813-4b71-9386-c08b2f063fa6", + "id" : "4456ff81-c720-4a78-9096-12c42902da8b", "alias" : "User creation or linking", "description" : "Flow for the existing/non-existing user alternatives", "providerId" : "basic-flow", @@ -4870,7 +4893,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "8fa69de0-13cf-4252-899b-c59a30ebd132", + "id" : "8bb53624-acec-447a-a768-532222ff2e8f", "alias" : "Verify Existing Account by Re-authentication", "description" : "Reauthentication of existing account", "providerId" : "basic-flow", @@ -4892,7 +4915,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "204d20f6-d9a7-49ff-a7a3-45386fb884f4", + "id" : "10a8b52f-b5a1-45ab-aeb6-26963d2c4ec4", "alias" : "browser", "description" : "browser based authentication", "providerId" : "basic-flow", @@ -4928,7 +4951,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "3c0c2987-65db-4920-ae44-34aba220c3fb", + "id" : "6ca05d13-7efc-43a4-8569-e7d45cb6db57", "alias" : "clients", "description" : "Base authentication for clients", "providerId" : "client-flow", @@ -4964,7 +4987,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "68a92113-be75-4e63-a322-8076d6c67650", + "id" : "2439ca39-9c37-4174-9b26-787604440ad6", "alias" : "direct grant", "description" : "OpenID Connect Resource Owner Grant", "providerId" : "basic-flow", @@ -4993,7 +5016,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "a630d78f-4fe1-4350-a19d-d091d1af514d", + "id" : "8c5814fd-61c0-40d3-9176-332b4558afb3", "alias" : "docker auth", "description" : "Used by Docker clients to authenticate against the IDP", "providerId" : "basic-flow", @@ -5008,7 +5031,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "f73b4437-8e82-4788-be69-e437b09b500c", + "id" : "104b06aa-5ce5-490b-9945-0d032d4d521b", "alias" : "first broker login", "description" : "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account", "providerId" : "basic-flow", @@ -5031,7 +5054,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "b7c8cc6d-bc1f-446e-b263-72214b2f5c56", + "id" : "94dc8a72-c455-4fa1-abeb-ca7f248e24a6", "alias" : "forms", "description" : "Username, password, otp and other auth forms.", "providerId" : "basic-flow", @@ -5053,7 +5076,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "a3bdf79f-8c7d-4bff-807d-76fa61093446", + "id" : "3ee49693-4049-48ff-9c4d-7ffe6507779d", "alias" : "http challenge", "description" : "An authentication flow based on challenge-response HTTP Authentication Schemes", "providerId" : "basic-flow", @@ -5075,7 +5098,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "ada41b4e-5a12-496d-aa1e-d31cf8c08226", + "id" : "444e348b-72ed-49e7-949f-b79fc08066d2", "alias" : "registration", "description" : "registration flow", "providerId" : "basic-flow", @@ -5091,7 +5114,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "1c858bcd-2031-4056-bbf0-1fbaecdd7068", + "id" : "b37ea96c-603f-4096-bca4-5f819c243aaf", "alias" : "registration form", "description" : "registration form", "providerId" : "form-flow", @@ -5127,7 +5150,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "ff91e251-d85e-450b-bff7-d45be26777d5", + "id" : "6471829e-0771-4bd7-aa62-797eda24d5c2", "alias" : 
"reset credentials", "description" : "Reset credentials for a user if they forgot their password or something", "providerId" : "basic-flow", @@ -5163,7 +5186,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "7b0680a2-99b9-454c-b145-f286e9d60c58", + "id" : "c743556b-fdfc-4615-8154-a8ad4019dfaa", "alias" : "saml ecp", "description" : "SAML ECP Profile Authentication Flow", "providerId" : "basic-flow", @@ -5179,13 +5202,13 @@ } ] } ], "authenticatorConfig" : [ { - "id" : "aa1e4f55-3e7f-445a-a432-7a972776d719", + "id" : "92245b69-55b4-4bc3-98f1-03ef168f009e", "alias" : "create unique user config", "config" : { "require.password.update.after.registration" : "false" } }, { - "id" : "fd69765e-309b-4c5d-bdd5-51343427cd27", + "id" : "3e657993-a0fc-4073-88d5-882310927e19", "alias" : "review profile config", "config" : { "update.profile.on.first.login" : "missing" diff --git a/spiffworkflow-backend/keycloak/test_user_lists/status b/spiffworkflow-backend/keycloak/test_user_lists/status index 3b57deff..7b76e707 100644 --- a/spiffworkflow-backend/keycloak/test_user_lists/status +++ b/spiffworkflow-backend/keycloak/test_user_lists/status @@ -42,6 +42,7 @@ desktop3.sme@status.im,196 desktop4.sme@status.im,197 desktop5.sme@status.im,198 fin@status.im,118 +finance.project-lead@status.im,128 finance_user1@status.im fluffy.project-lead@status.im,162 harmeet@status.im,109 From 48b8a336f63d00f05ff8b7b82c6a5e80e47f3eb1 Mon Sep 17 00:00:00 2001 From: jasquat Date: Thu, 16 Mar 2023 16:18:02 -0400 Subject: [PATCH 036/162] pyl w/ burnettk --- .../services/process_instance_processor.py | 5 ++++- .../src/spiffworkflow_backend/services/task_service.py | 2 +- .../services/workflow_execution_service.py | 4 +++- .../integration/test_process_api.py | 2 +- .../unit/test_process_instance_processor.py | 10 ++++++---- 5 files changed, 15 insertions(+), 8 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index c4bc4856..373d2101 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -1583,7 +1583,10 @@ class ProcessInstanceProcessor: finally: # clear out failling spiff tasks here since the ProcessInstanceProcessor creates an instance of the # script engine on a class variable. 
- if hasattr(self._script_engine, "failing_spiff_task") and self._script_engine.failing_spiff_task is not None: + if ( + hasattr(self._script_engine, "failing_spiff_task") + and self._script_engine.failing_spiff_task is not None + ): self._script_engine.failing_spiff_task = None # log the spiff step details so we know what is processing the process diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index a83a60c3..b8d26142 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -204,7 +204,7 @@ class TaskService: bpmn_process_json_data = cls.update_task_data_on_bpmn_process(bpmn_process, bpmn_process_data_dict) if bpmn_process_json_data is not None: - new_json_data_dicts[bpmn_process_json_data['hash']] = bpmn_process_json_data + new_json_data_dicts[bpmn_process_json_data["hash"]] = bpmn_process_json_data if bpmn_process_parent is None: process_instance.bpmn_process = bpmn_process diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py index 3f735901..d5af82cd 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py @@ -126,7 +126,9 @@ class TaskModelSavingDelegate(EngineStepDelegate): bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, ) ) - bpmn_process_json_data = TaskService.update_task_data_on_bpmn_process(bpmn_process or task_model.bpmn_process, spiff_task.workflow.data) + bpmn_process_json_data = TaskService.update_task_data_on_bpmn_process( + bpmn_process or task_model.bpmn_process, spiff_task.workflow.data + ) self.task_models.update(new_task_models) self.json_data_dicts.update(new_json_data_dicts) json_data_dict_list = TaskService.update_task_model(task_model, spiff_task, self.serializer) diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index 3d7b729b..2feb2d59 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -1,6 +1,5 @@ """Test Process Api Blueprint.""" import io -from SpiffWorkflow.task import TaskState import json import os import time @@ -10,6 +9,7 @@ from typing import Dict import pytest from flask.app import Flask from flask.testing import FlaskClient +from SpiffWorkflow.task import TaskState # type: ignore from tests.spiffworkflow_backend.helpers.base_test import BaseTest from tests.spiffworkflow_backend.helpers.test_data import load_test_spec diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index bf244a64..0e46cf5f 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -5,11 +5,11 @@ import pytest from flask import g from flask.app import Flask from flask.testing import FlaskClient -from 
SpiffWorkflow.task import TaskState -from spiffworkflow_backend.exceptions.api_error import ApiError # type: ignore +from SpiffWorkflow.task import TaskState # type: ignore from tests.spiffworkflow_backend.helpers.base_test import BaseTest from tests.spiffworkflow_backend.helpers.test_data import load_test_spec +from spiffworkflow_backend.exceptions.api_error import ApiError from spiffworkflow_backend.models.group import GroupModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus @@ -319,7 +319,7 @@ class TestProcessInstanceProcessor(BaseTest): **{"set_in_test_process_to_call_script": 1}, } fourth_data_set = {**third_data_set, **{"a": 1, "we_move_on": True}} - fifth_data_set = {**fourth_data_set, **{'validate_only': False, 'set_top_level_process_script_after_gate': 1}} + fifth_data_set = {**fourth_data_set, **{"validate_only": False, "set_top_level_process_script_after_gate": 1}} expected_task_data = { "top_level_script": first_data_set, "manual_task": first_data_set, @@ -499,7 +499,9 @@ class TestProcessInstanceProcessor(BaseTest): process_instance_final = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() processor_final = ProcessInstanceProcessor(process_instance_final) - spiff_task = processor_final.get_task_by_bpmn_identifier("script_task_two", processor_final.bpmn_process_instance) + spiff_task = processor_final.get_task_by_bpmn_identifier( + "script_task_two", processor_final.bpmn_process_instance + ) assert spiff_task is not None assert spiff_task.state == TaskState.WAITING assert spiff_task.data == {"my_var": "THE VAR"} From a2e659da8771fd44d42dc367822a9253316b9738 Mon Sep 17 00:00:00 2001 From: jasquat Date: Thu, 16 Mar 2023 16:59:37 -0400 Subject: [PATCH 037/162] added bpmn process definition to bpmn process w/ burnettk --- .../{077a27ef1246_.py => 8ee0f1c23cc7_.py} | 118 +++++++++--------- .../models/bpmn_process.py | 6 + .../services/process_instance_processor.py | 87 ++++++------- .../services/task_service.py | 5 + .../services/workflow_execution_service.py | 1 - 5 files changed, 115 insertions(+), 102 deletions(-) rename spiffworkflow-backend/migrations/versions/{077a27ef1246_.py => 8ee0f1c23cc7_.py} (99%) diff --git a/spiffworkflow-backend/migrations/versions/077a27ef1246_.py b/spiffworkflow-backend/migrations/versions/8ee0f1c23cc7_.py similarity index 99% rename from spiffworkflow-backend/migrations/versions/077a27ef1246_.py rename to spiffworkflow-backend/migrations/versions/8ee0f1c23cc7_.py index b3f07625..47641e3c 100644 --- a/spiffworkflow-backend/migrations/versions/077a27ef1246_.py +++ b/spiffworkflow-backend/migrations/versions/8ee0f1c23cc7_.py @@ -1,8 +1,8 @@ """empty message -Revision ID: 077a27ef1246 +Revision ID: 8ee0f1c23cc7 Revises: -Create Date: 2023-03-15 16:36:23.278887 +Create Date: 2023-03-16 16:24:47.364768 """ from alembic import op @@ -10,7 +10,7 @@ import sqlalchemy as sa from sqlalchemy.dialects import mysql # revision identifiers, used by Alembic. -revision = '077a27ef1246' +revision = '8ee0f1c23cc7' down_revision = None branch_labels = None depends_on = None @@ -18,19 +18,6 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### - op.create_table('bpmn_process', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('guid', sa.String(length=36), nullable=True), - sa.Column('parent_process_id', sa.Integer(), nullable=True), - sa.Column('properties_json', sa.JSON(), nullable=False), - sa.Column('json_data_hash', sa.String(length=255), nullable=False), - sa.Column('start_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True), - sa.Column('end_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True), - sa.ForeignKeyConstraint(['parent_process_id'], ['bpmn_process.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index(op.f('ix_bpmn_process_guid'), 'bpmn_process', ['guid'], unique=True) - op.create_index(op.f('ix_bpmn_process_json_data_hash'), 'bpmn_process', ['json_data_hash'], unique=False) op.create_table('bpmn_process_definition', sa.Column('id', sa.Integer(), nullable=False), sa.Column('hash', sa.String(length=255), nullable=False), @@ -129,6 +116,21 @@ def upgrade(): sa.UniqueConstraint('service', 'service_id', name='service_key'), sa.UniqueConstraint('username') ) + op.create_table('bpmn_process', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('guid', sa.String(length=36), nullable=True), + sa.Column('bpmn_process_definition_id', sa.Integer(), nullable=False), + sa.Column('parent_process_id', sa.Integer(), nullable=True), + sa.Column('properties_json', sa.JSON(), nullable=False), + sa.Column('json_data_hash', sa.String(length=255), nullable=False), + sa.Column('start_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True), + sa.Column('end_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True), + sa.ForeignKeyConstraint(['bpmn_process_definition_id'], ['bpmn_process_definition.id'], ), + sa.ForeignKeyConstraint(['parent_process_id'], ['bpmn_process.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_bpmn_process_guid'), 'bpmn_process', ['guid'], unique=True) + op.create_index(op.f('ix_bpmn_process_json_data_hash'), 'bpmn_process', ['json_data_hash'], unique=False) op.create_table('bpmn_process_definition_relationship', sa.Column('id', sa.Integer(), nullable=False), sa.Column('bpmn_process_definition_parent_id', sa.Integer(), nullable=False), @@ -149,30 +151,6 @@ def upgrade(): sa.UniqueConstraint('group_id'), sa.UniqueConstraint('user_id') ) - op.create_table('process_instance', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('process_model_identifier', sa.String(length=255), nullable=False), - sa.Column('process_model_display_name', sa.String(length=255), nullable=False), - sa.Column('process_initiator_id', sa.Integer(), nullable=False), - sa.Column('bpmn_process_definition_id', sa.Integer(), nullable=True), - sa.Column('bpmn_process_id', sa.Integer(), nullable=True), - sa.Column('spiff_serializer_version', sa.String(length=50), nullable=True), - sa.Column('bpmn_json', sa.JSON(), nullable=True), - sa.Column('start_in_seconds', sa.Integer(), nullable=True), - sa.Column('end_in_seconds', sa.Integer(), nullable=True), - sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True), - sa.Column('created_at_in_seconds', sa.Integer(), nullable=True), - sa.Column('status', sa.String(length=50), nullable=True), - sa.Column('bpmn_version_control_type', sa.String(length=50), nullable=True), - sa.Column('bpmn_version_control_identifier', sa.String(length=255), nullable=True), - sa.Column('spiff_step', sa.Integer(), nullable=True), - sa.ForeignKeyConstraint(['bpmn_process_definition_id'], ['bpmn_process_definition.id'], ), - 
sa.ForeignKeyConstraint(['bpmn_process_id'], ['bpmn_process.id'], ), - sa.ForeignKeyConstraint(['process_initiator_id'], ['user.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index(op.f('ix_process_instance_process_model_display_name'), 'process_instance', ['process_model_display_name'], unique=False) - op.create_index(op.f('ix_process_instance_process_model_identifier'), 'process_instance', ['process_model_identifier'], unique=False) op.create_table('process_instance_report', sa.Column('id', sa.Integer(), nullable=False), sa.Column('identifier', sa.String(length=50), nullable=False), @@ -235,6 +213,41 @@ def upgrade(): sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('username', 'group_id', name='user_group_assignment_staged_unique') ) + op.create_table('permission_assignment', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('principal_id', sa.Integer(), nullable=False), + sa.Column('permission_target_id', sa.Integer(), nullable=False), + sa.Column('grant_type', sa.String(length=50), nullable=False), + sa.Column('permission', sa.String(length=50), nullable=False), + sa.ForeignKeyConstraint(['permission_target_id'], ['permission_target.id'], ), + sa.ForeignKeyConstraint(['principal_id'], ['principal.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('principal_id', 'permission_target_id', 'permission', name='permission_assignment_uniq') + ) + op.create_table('process_instance', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('process_model_identifier', sa.String(length=255), nullable=False), + sa.Column('process_model_display_name', sa.String(length=255), nullable=False), + sa.Column('process_initiator_id', sa.Integer(), nullable=False), + sa.Column('bpmn_process_definition_id', sa.Integer(), nullable=True), + sa.Column('bpmn_process_id', sa.Integer(), nullable=True), + sa.Column('spiff_serializer_version', sa.String(length=50), nullable=True), + sa.Column('bpmn_json', sa.JSON(), nullable=True), + sa.Column('start_in_seconds', sa.Integer(), nullable=True), + sa.Column('end_in_seconds', sa.Integer(), nullable=True), + sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True), + sa.Column('created_at_in_seconds', sa.Integer(), nullable=True), + sa.Column('status', sa.String(length=50), nullable=True), + sa.Column('bpmn_version_control_type', sa.String(length=50), nullable=True), + sa.Column('bpmn_version_control_identifier', sa.String(length=255), nullable=True), + sa.Column('spiff_step', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['bpmn_process_definition_id'], ['bpmn_process_definition.id'], ), + sa.ForeignKeyConstraint(['bpmn_process_id'], ['bpmn_process.id'], ), + sa.ForeignKeyConstraint(['process_initiator_id'], ['user.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_process_instance_process_model_display_name'), 'process_instance', ['process_model_display_name'], unique=False) + op.create_index(op.f('ix_process_instance_process_model_identifier'), 'process_instance', ['process_model_identifier'], unique=False) op.create_table('message_instance', sa.Column('id', sa.Integer(), nullable=False), sa.Column('process_instance_id', sa.Integer(), nullable=True), @@ -252,17 +265,6 @@ def upgrade(): sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), sa.PrimaryKeyConstraint('id') ) - op.create_table('permission_assignment', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('principal_id', sa.Integer(), nullable=False), - sa.Column('permission_target_id', sa.Integer(), nullable=False), - 
sa.Column('grant_type', sa.String(length=50), nullable=False), - sa.Column('permission', sa.String(length=50), nullable=False), - sa.ForeignKeyConstraint(['permission_target_id'], ['permission_target.id'], ), - sa.ForeignKeyConstraint(['principal_id'], ['principal.id'], ), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('principal_id', 'permission_target_id', 'permission', name='permission_assignment_uniq') - ) op.create_table('process_instance_file_data', sa.Column('id', sa.Integer(), nullable=False), sa.Column('process_instance_id', sa.Integer(), nullable=False), @@ -418,8 +420,11 @@ def downgrade(): op.drop_table('process_instance_metadata') op.drop_index(op.f('ix_process_instance_file_data_digest'), table_name='process_instance_file_data') op.drop_table('process_instance_file_data') - op.drop_table('permission_assignment') op.drop_table('message_instance') + op.drop_index(op.f('ix_process_instance_process_model_identifier'), table_name='process_instance') + op.drop_index(op.f('ix_process_instance_process_model_display_name'), table_name='process_instance') + op.drop_table('process_instance') + op.drop_table('permission_assignment') op.drop_table('user_group_assignment_waiting') op.drop_table('user_group_assignment') op.drop_index(op.f('ix_task_definition_bpmn_identifier'), table_name='task_definition') @@ -429,11 +434,11 @@ def downgrade(): op.drop_index(op.f('ix_process_instance_report_identifier'), table_name='process_instance_report') op.drop_index(op.f('ix_process_instance_report_created_by_id'), table_name='process_instance_report') op.drop_table('process_instance_report') - op.drop_index(op.f('ix_process_instance_process_model_identifier'), table_name='process_instance') - op.drop_index(op.f('ix_process_instance_process_model_display_name'), table_name='process_instance') - op.drop_table('process_instance') op.drop_table('principal') op.drop_table('bpmn_process_definition_relationship') + op.drop_index(op.f('ix_bpmn_process_json_data_hash'), table_name='bpmn_process') + op.drop_index(op.f('ix_bpmn_process_guid'), table_name='bpmn_process') + op.drop_table('bpmn_process') op.drop_table('user') op.drop_table('spiff_logging') op.drop_index(op.f('ix_spec_reference_cache_type'), table_name='spec_reference_cache') @@ -450,7 +455,4 @@ def downgrade(): op.drop_index(op.f('ix_bpmn_process_definition_hash'), table_name='bpmn_process_definition') op.drop_index(op.f('ix_bpmn_process_definition_bpmn_identifier'), table_name='bpmn_process_definition') op.drop_table('bpmn_process_definition') - op.drop_index(op.f('ix_bpmn_process_json_data_hash'), table_name='bpmn_process') - op.drop_index(op.f('ix_bpmn_process_guid'), table_name='bpmn_process') - op.drop_table('bpmn_process') # ### end Alembic commands ### diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py index 1eaf200d..8e5fa700 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py @@ -3,6 +3,7 @@ from __future__ import annotations from sqlalchemy import ForeignKey from sqlalchemy.orm import relationship +from spiffworkflow_backend.models.bpmn_process_definition import BpmnProcessDefinitionModel from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel @@ -18,6 +19,11 @@ class BpmnProcessModel(SpiffworkflowBaseDBModel): id: int = db.Column(db.Integer, primary_key=True) 
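# A hypothetical usage sketch (the guid value is invented): once the
# bpmn_process_definition relationship added just below is in place, the
# definition row can be reached straight from a process row. BpmnProcessModel.query
# is the flask-sqlalchemy handle already used elsewhere in this series.
#
#   process = BpmnProcessModel.query.filter_by(guid="some-task-guid").first()
#   if process is not None:
#       print(process.bpmn_process_definition.bpmn_identifier)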
guid: str | None = db.Column(db.String(36), nullable=True, unique=True, index=True) + bpmn_process_definition_id: int = db.Column( + ForeignKey(BpmnProcessDefinitionModel.id), nullable=False # type: ignore + ) + bpmn_process_definition = relationship(BpmnProcessDefinitionModel) + parent_process_id: int | None = db.Column(ForeignKey("bpmn_process.id"), nullable=True) properties_json: dict = db.Column(db.JSON, nullable=False) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 373d2101..0aac218f 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -449,7 +449,10 @@ class ProcessInstanceProcessor: # this caches the bpmn_process_definition_identifier and task_identifier back to the bpmn_process_id # in the database. This is to cut down on database queries while adding new tasks to the database. # Structure: - # { "bpmn_process_definition_identifier": { "task_identifier": task_definition } } + # { "[[BPMN_PROCESS_DEFINITION_IDENTIFIER]]": { + # "[[TASK_IDENTIFIER]]": [[TASK_DEFINITION]], + # "bpmn_process_definition": [[BPMN_PROCESS_DEFINITION]] } + # } # To use from a spiff_task: # [spiff_task.workflow.spec.name][spiff_task.task_spec.name] self.bpmn_definition_to_task_definitions_mappings: dict = {} @@ -523,13 +526,21 @@ class ProcessInstanceProcessor: cls, bpmn_definition_to_task_definitions_mappings: dict, bpmn_process_definition_identifier: str, - task_definition: TaskDefinitionModel, + task_definition: Optional[TaskDefinitionModel] = None, + bpmn_process_definition: Optional[BpmnProcessDefinitionModel] = None, ) -> None: if bpmn_process_definition_identifier not in bpmn_definition_to_task_definitions_mappings: bpmn_definition_to_task_definitions_mappings[bpmn_process_definition_identifier] = {} - bpmn_definition_to_task_definitions_mappings[bpmn_process_definition_identifier][ - task_definition.bpmn_identifier - ] = task_definition + + if task_definition is not None: + bpmn_definition_to_task_definitions_mappings[bpmn_process_definition_identifier][ + task_definition.bpmn_identifier + ] = task_definition + + if bpmn_process_definition is not None: + bpmn_definition_to_task_definitions_mappings[bpmn_process_definition_identifier][ + "bpmn_process_definition" + ] = bpmn_process_definition @classmethod def _get_definition_dict_for_bpmn_process_definition( @@ -537,6 +548,11 @@ class ProcessInstanceProcessor: bpmn_process_definition: BpmnProcessDefinitionModel, bpmn_definition_to_task_definitions_mappings: dict, ) -> dict: + cls._update_bpmn_definition_mappings( + bpmn_definition_to_task_definitions_mappings, + bpmn_process_definition.bpmn_identifier, + bpmn_process_definition=bpmn_process_definition, + ) task_definitions = TaskDefinitionModel.query.filter_by( bpmn_process_definition_id=bpmn_process_definition.id ).all() @@ -549,7 +565,7 @@ class ProcessInstanceProcessor: cls._update_bpmn_definition_mappings( bpmn_definition_to_task_definitions_mappings, bpmn_process_definition.bpmn_identifier, - task_definition, + task_definition=task_definition, ) return bpmn_process_definition_dict @@ -573,6 +589,11 @@ class ProcessInstanceProcessor: bpmn_subprocess_definition_bpmn_identifiers = {} for bpmn_subprocess_definition in bpmn_process_subprocess_definitions: + cls._update_bpmn_definition_mappings( + 
bpmn_definition_to_task_definitions_mappings, + bpmn_subprocess_definition.bpmn_identifier, + bpmn_process_definition=bpmn_subprocess_definition, + ) bpmn_process_definition_dict: dict = bpmn_subprocess_definition.properties_json spiff_bpmn_process_dict["subprocess_specs"][ bpmn_subprocess_definition.bpmn_identifier @@ -594,7 +615,7 @@ class ProcessInstanceProcessor: cls._update_bpmn_definition_mappings( bpmn_definition_to_task_definitions_mappings, bpmn_subprocess_definition_bpmn_identifier, - task_definition, + task_definition=task_definition, ) spiff_bpmn_process_dict["subprocess_specs"][bpmn_subprocess_definition_bpmn_identifier]["task_specs"][ task_definition.bpmn_identifier @@ -988,6 +1009,11 @@ class ProcessInstanceProcessor: properties_json=process_bpmn_properties, ) db.session.add(bpmn_process_definition) + self._update_bpmn_definition_mappings( + self.bpmn_definition_to_task_definitions_mappings, + bpmn_process_definition.bpmn_identifier, + bpmn_process_definition=bpmn_process_definition, + ) for task_bpmn_identifier, task_bpmn_properties in task_specs.items(): task_definition = TaskDefinitionModel( @@ -1001,11 +1027,16 @@ class ProcessInstanceProcessor: self._update_bpmn_definition_mappings( self.bpmn_definition_to_task_definitions_mappings, process_bpmn_identifier, - task_definition, + task_definition=task_definition, ) elif store_bpmn_definition_mappings: # this should only ever happen when new process instances use a pre-existing bpmn process definitions # otherwise this should get populated on processor initialization + self._update_bpmn_definition_mappings( + self.bpmn_definition_to_task_definitions_mappings, + process_bpmn_identifier, + bpmn_process_definition=bpmn_process_definition, + ) task_definitions = TaskDefinitionModel.query.filter_by( bpmn_process_definition_id=bpmn_process_definition.id ).all() @@ -1013,7 +1044,7 @@ class ProcessInstanceProcessor: self._update_bpmn_definition_mappings( self.bpmn_definition_to_task_definitions_mappings, process_bpmn_identifier, - task_definition, + task_definition=task_definition, ) if bpmn_process_definition_parent is not None: @@ -1044,10 +1075,10 @@ class ProcessInstanceProcessor: ) self.process_instance_model.bpmn_process_definition = bpmn_process_definition_parent - def _add_bpmn_json_records(self) -> None: - """Adds serialized_bpmn_definition and process_instance_data records to the db session. + def _add_bpmn_process_defintions(self) -> None: + """Adds serialized_bpmn_definition records to the db session. - Expects the save method to commit it. + Expects the calling method to commit it. 
""" if self.process_instance_model.bpmn_process_definition_id is not None: return None @@ -1063,40 +1094,10 @@ class ProcessInstanceProcessor: else: process_instance_data_dict[bpmn_key] = bpmn_dict[bpmn_key] - # if self.process_instance_model.bpmn_process_definition_id is None: self._add_bpmn_process_definitions(bpmn_spec_dict) - # subprocesses = process_instance_data_dict.pop("subprocesses") - # bpmn_process_parent, new_task_models, new_json_data_dicts = TaskService.add_bpmn_process( - # bpmn_process_dict=process_instance_data_dict, - # process_instance=self.process_instance_model, - # bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, - # spiff_workflow=self.bpmn_process_instance, - # serializer=self._serializer, - # ) - # for subprocess_task_id, subprocess_properties in subprocesses.items(): - # ( - # _bpmn_subprocess, - # subprocess_new_task_models, - # subprocess_new_json_data_models, - # ) = TaskService.add_bpmn_process( - # bpmn_process_dict=subprocess_properties, - # process_instance=self.process_instance_model, - # bpmn_process_parent=bpmn_process_parent, - # bpmn_process_guid=subprocess_task_id, - # bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, - # spiff_workflow=self.bpmn_process_instance, - # serializer=self._serializer, - # ) - # new_task_models.update(subprocess_new_task_models) - # new_json_data_dicts.update(subprocess_new_json_data_models) - # db.session.bulk_save_objects(new_task_models.values()) - # - # TaskService.insert_or_update_json_data_records(new_json_data_dicts) - def save(self) -> None: """Saves the current state of this processor to the database.""" - # self._add_bpmn_json_records() self.process_instance_model.spiff_serializer_version = self.SERIALIZER_VERSION complete_states = [TaskState.CANCELLED, TaskState.COMPLETED] @@ -1557,7 +1558,7 @@ class ProcessInstanceProcessor: self._script_engine.environment.revise_state_with_task_data(task) return self.spiff_step_details_mapping(task, start, end) - self._add_bpmn_json_records() + self._add_bpmn_process_defintions() step_delegate = StepDetailLoggingDelegate(self.increment_spiff_step, spiff_step_details_mapping_builder) task_model_delegate = TaskModelSavingDelegate( diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index b8d26142..5e7bf88c 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -211,6 +211,11 @@ class TaskService: elif bpmn_process.parent_process_id is None: bpmn_process.parent_process_id = bpmn_process_parent.id + bpmn_process_definition = bpmn_definition_to_task_definitions_mappings[spiff_workflow.spec.name][ + "bpmn_process_definition" + ] + bpmn_process.bpmn_process_definition = bpmn_process_definition + # Since we bulk insert tasks later we need to add the bpmn_process to the session # to ensure we have an id. 
db.session.add(bpmn_process) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py index d5af82cd..5f80edc2 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py @@ -294,7 +294,6 @@ class WorkflowExecutionService: ) try: - # import pdb; pdb.set_trace() self.bpmn_process_instance.refresh_waiting_tasks() # TODO: implicit re-entrant locks here `with_dequeued` From 8bd946235cbaefdcad3781d48da8d02347161656 Mon Sep 17 00:00:00 2001 From: jasquat Date: Thu, 16 Mar 2023 17:58:43 -0400 Subject: [PATCH 038/162] add in missing fields to logs --- .../routes/process_instances_controller.py | 7 ++ .../services/workflow_execution_service.py | 10 +- .../integration/test_logging_service.py | 94 ++++++++++--------- .../unit/test_process_instance_processor.py | 25 +++-- 4 files changed, 82 insertions(+), 54 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index 66e8810d..9be85885 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -1,5 +1,7 @@ """APIs for dealing with process groups, process models, and process instances.""" import base64 +from spiffworkflow_backend.models.bpmn_process_definition import BpmnProcessDefinitionModel +from spiffworkflow_backend.models.task_definition import TaskDefinitionModel import json from typing import Any from typing import Dict @@ -236,10 +238,15 @@ def process_instance_log_list( log_query = TaskModel.query.filter_by(process_instance_id=process_instance.id) logs = ( log_query.order_by(TaskModel.end_in_seconds.desc()) # type: ignore + .join(TaskDefinitionModel, TaskDefinitionModel.id == TaskModel.task_definition_id) + .join(BpmnProcessDefinitionModel, BpmnProcessDefinitionModel.id == TaskDefinitionModel.bpmn_process_definition_id) .outerjoin(HumanTaskModel, HumanTaskModel.task_model_id == TaskModel.id) .outerjoin(UserModel, UserModel.id == HumanTaskModel.completed_by_user_id) .add_columns( + TaskModel.guid.label('spiff_task_guid'), UserModel.username, + BpmnProcessDefinitionModel.bpmn_identifier.label('bpmn_process_definition_identifier'), + TaskDefinitionModel.bpmn_identifier.label('task_definition_identifier'), ) .paginate(page=page, per_page=per_page, error_out=False) ) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py index 5f80edc2..fe98ac80 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py @@ -78,7 +78,11 @@ class TaskModelSavingDelegate(EngineStepDelegate): def did_complete_task(self, spiff_task: SpiffTask) -> None: if self._should_update_task_model(): - self._update_task_model_with_spiff_task(spiff_task) + task_model = self._update_task_model_with_spiff_task(spiff_task) + if self.current_task_start_in_seconds is None: + raise Exception("Could not find cached current_task_start_in_seconds. 
This should never have happened")
+            task_model.start_in_seconds = self.current_task_start_in_seconds
+            task_model.end_in_seconds = time.time()

         if self.secondary_engine_step_delegate:
             self.secondary_engine_step_delegate.did_complete_task(spiff_task)
@@ -117,7 +121,7 @@ class TaskModelSavingDelegate:
         if json_data_dict is not None:
             self.json_data_dicts[json_data_dict["hash"]] = json_data_dict

-    def _update_task_model_with_spiff_task(self, spiff_task: SpiffTask) -> None:
+    def _update_task_model_with_spiff_task(self, spiff_task: SpiffTask) -> TaskModel:
         bpmn_process, task_model, new_task_models, new_json_data_dicts = (
             TaskService.find_or_create_task_model_from_spiff_task(
                 spiff_task,
@@ -137,6 +141,8 @@ class TaskModelSavingDelegate:
             json_data_dict_list.append(bpmn_process_json_data)
         self._update_json_data_dicts_using_list(json_data_dict_list)

+        return task_model
+

 class StepDetailLoggingDelegate(EngineStepDelegate):
     """Engine step delegate that takes care of logging spiff step details.

diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py
index 626aac55..c15b5eeb 100644
--- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py
+++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py
@@ -1,9 +1,17 @@
 """Test_logging_service."""
 from flask.app import Flask
+from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
+from spiffworkflow_backend.services.authorization_service import AuthorizationService
 from flask.testing import FlaskClient
 from tests.spiffworkflow_backend.helpers.base_test import BaseTest

 from spiffworkflow_backend.models.user import UserModel
+from spiffworkflow_backend.services.process_instance_service import (
+    ProcessInstanceService,
+)
+from spiffworkflow_backend.services.process_instance_processor import (
+    ProcessInstanceProcessor,
+)


 class TestLoggingService(BaseTest):
@@ -16,58 +24,52 @@ class TestLoggingService(BaseTest):
         with_db_and_bpmn_file_cleanup: None,
         with_super_admin_user: UserModel,
     ) -> None:
-        """Test_process_instance_run."""
-        process_group_id = "test_logging_spiff_logger"
-        process_model_id = "simple_script"
-        self.create_process_group(client=client, user=with_super_admin_user, process_group_id=process_group_id)
-        process_model_identifier = f"{process_group_id}/{process_model_id}"
-        # create the model
-        self.create_process_model_with_api(
-            client=client,
-            process_model_id=process_model_identifier,
-            process_model_display_name="Simple Script",
-            process_model_description="Simple Script",
-            user=with_super_admin_user,
-        )
+        self.create_process_group(client, with_super_admin_user, "test_group", "test_group")
+        initiator_user = self.find_or_create_user("initiator_user")
+        assert initiator_user.principal is not None
+        AuthorizationService.import_permissions_from_yaml_file()

-        bpmn_file_name = "simple_script.bpmn"
-        bpmn_file_data_bytes = self.get_test_data_file_contents(bpmn_file_name, "simple_script")
-        # add bpmn to the model
-        self.create_spec_file(
-            client=client,
-            process_model_id=process_model_identifier,
-            file_name=bpmn_file_name,
-            file_data=bpmn_file_data_bytes,
-            user=with_super_admin_user,
+        process_model = load_test_spec(
+            process_model_id="misc/category_number_one/simple_form",
+            # bpmn_file_name="simp.bpmn",
+            process_model_source_directory="simple_form",
         )
+        process_instance = 
self.create_process_instance_from_process_model( + process_model=process_model, user=initiator_user + ) + processor = ProcessInstanceProcessor(process_instance) + processor.do_engine_steps(save=True) + + assert len(process_instance.active_human_tasks) == 1 + human_task = process_instance.active_human_tasks[0] + assert len(human_task.potential_owners) == 1 + assert human_task.potential_owners[0] == initiator_user + + spiff_task = processor.__class__.get_task_by_bpmn_identifier( + human_task.task_name, processor.bpmn_process_instance + ) + ProcessInstanceService.complete_form_task(processor, spiff_task, {"name": "HEY"}, initiator_user, human_task) + headers = self.logged_in_headers(with_super_admin_user) - response = self.create_process_instance_from_process_model_id_with_api( - client, process_model_identifier, headers - ) - assert response.json is not None - process_instance_id = response.json["id"] - response = client.post( - f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/run", - headers=headers, - ) - assert response.status_code == 200 - log_response = client.get( - f"/v1.0/logs/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}?detailed=true", + f"/v1.0/logs/{self.modify_process_identifier_for_path_param(process_model.id)}/{process_instance.id}?detailed=true", headers=headers, ) assert log_response.status_code == 200 assert log_response.json logs: list = log_response.json["results"] - assert len(logs) == 8 - print(f"logs[0]: {logs[0]}") - # for log in logs: - # assert log["process_instance_id"] == process_instance_id - # for key in [ - # "timestamp", - # "spiff_task_guid", - # "bpmn_task_identifier", - # "bpmn_process_identifier", - # "message", - # ]: - # assert key in log.keys() + assert len(logs) == 7 + + for log in logs: + assert log["process_instance_id"] == process_instance.id + for key in [ + "start_in_seconds", + "end_in_seconds", + "spiff_task_guid", + "bpmn_process_definition_identifier", + "task_definition_identifier", + ]: + assert key in log.keys() + + if log['task_definition_identifier'] == 'Activity_SimpleForm': + assert log['username'] == initiator_user.username diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index 0e46cf5f..ce5355e5 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -1,5 +1,7 @@ """Test_process_instance_processor.""" from uuid import UUID +from spiffworkflow_backend.models import bpmn_process_definition +from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel import pytest from flask import g @@ -341,15 +343,18 @@ class TestProcessInstanceProcessor(BaseTest): expected_python_env_data = expected_task_data[spiff_task.task_spec.name] if spiff_task.task_spec.name in spiff_tasks_checked_once: expected_python_env_data = expected_task_data[f"{spiff_task.task_spec.name}_second"] - task = TaskModel.query.filter_by(guid=str(spiff_task.id)).first() - assert task.task_definition_id is not None - task_definition = task.task_definition + task_model = TaskModel.query.filter_by(guid=str(spiff_task.id)).first() + + assert task_model.start_in_seconds is not None + assert task_model.end_in_seconds is not None + assert task_model.task_definition_id 
is not None + task_definition = task_model.task_definition assert task_definition.bpmn_identifier == spiff_task_name assert task_definition.bpmn_process_definition.bpmn_identifier == bpmn_process_identifier - message = f"{base_failure_message} Expected: {expected_python_env_data}. Received: {task.json_data()}" + message = f"{base_failure_message} Expected: {expected_python_env_data}. Received: {task_model.json_data()}" # TODO: if we split out env data again we will need to use it here instead of json_data - # assert task.python_env_data() == expected_python_env_data, message - assert task.json_data() == expected_python_env_data, message + # assert task_model.python_env_data() == expected_python_env_data, message + assert task_model.json_data() == expected_python_env_data, message spiff_tasks_checked_once.append(spiff_task.task_spec.name) all_spiff_tasks = processor_final.bpmn_process_instance.get_tasks() @@ -360,6 +365,14 @@ class TestProcessInstanceProcessor(BaseTest): assert_spiff_task_is_in_process("top_level_subprocess_script", "top_level_subprocess") assert_spiff_task_is_in_process("top_level_script", "top_level_process") + if spiff_task.task_spec.name == 'top_level_call_activity': + # the task id / guid of the call activity gets used as the guid of the bpmn process that it calls + bpmn_process = BpmnProcessModel.query.filter_by(guid=str(spiff_task.id)).first() + assert bpmn_process is not None + bpmn_process_definition = bpmn_process.bpmn_process_definition + assert bpmn_process_definition is not None + assert bpmn_process_definition.bpmn_identifier == 'test_process_to_call' + assert processor.get_data() == fifth_data_set def test_does_not_recreate_human_tasks_on_multiple_saves( From 5966e7b3568c380a68794f51bb4723480c79baff Mon Sep 17 00:00:00 2001 From: burnettk Date: Thu, 16 Mar 2023 22:42:35 -0400 Subject: [PATCH 039/162] fix all deprecation warnings --- spiffworkflow-backend/pyproject.toml | 4 +++- .../src/spiffworkflow_backend/models/bpmn_process.py | 2 +- .../src/spiffworkflow_backend/models/human_task_user.py | 2 +- .../src/spiffworkflow_backend/models/task.py | 2 +- 4 files changed, 6 insertions(+), 4 deletions(-) diff --git a/spiffworkflow-backend/pyproject.toml b/spiffworkflow-backend/pyproject.toml index 77b4533a..103de8c8 100644 --- a/spiffworkflow-backend/pyproject.toml +++ b/spiffworkflow-backend/pyproject.toml @@ -127,7 +127,9 @@ filterwarnings = [ "ignore:'_request_ctx_stack' is deprecated and will be removed in Flask 2.3", "ignore:Setting 'json_encoder' on the app or a blueprint is deprecated and will be removed in Flask 2.3", "ignore:'JSONEncoder' is deprecated and will be removed in Flask 2.3", - "ignore:'app.json_encoder' is deprecated and will be removed in Flask 2.3" + "ignore:'app.json_encoder' is deprecated and will be removed in Flask 2.3", + # SpiffWorkflow/bpmn/PythonScriptEngineEnvironment.py + 'ignore:The usage of Box has been deprecated' ] [tool.coverage.paths] diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py index 8e5fa700..24ccbe28 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py @@ -29,7 +29,7 @@ class BpmnProcessModel(SpiffworkflowBaseDBModel): properties_json: dict = db.Column(db.JSON, nullable=False) json_data_hash: str = db.Column(db.String(255), nullable=False, index=True) - tasks = relationship("TaskModel", cascade="delete") # 
type: ignore + tasks = relationship("TaskModel", back_populates="bpmn_process", cascade="delete") # type: ignore # subprocess or top_level_process # process_type: str = db.Column(db.String(30), nullable=False) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task_user.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task_user.py index b2219bf4..1d3e4770 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task_user.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task_user.py @@ -30,4 +30,4 @@ class HumanTaskUserModel(SpiffworkflowBaseDBModel): human_task_id = db.Column(ForeignKey(HumanTaskModel.id), nullable=False, index=True) # type: ignore user_id = db.Column(ForeignKey(UserModel.id), nullable=False, index=True) # type: ignore - human_task = relationship(HumanTaskModel) + human_task = relationship(HumanTaskModel, back_populates="human_task_users") diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py index 95947723..98058071 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py @@ -50,7 +50,7 @@ class TaskModel(SpiffworkflowBaseDBModel): id: int = db.Column(db.Integer, primary_key=True) guid: str = db.Column(db.String(36), nullable=False, unique=True, index=True) bpmn_process_id: int = db.Column(ForeignKey(BpmnProcessModel.id), nullable=False) # type: ignore - bpmn_process = relationship(BpmnProcessModel) + bpmn_process = relationship(BpmnProcessModel, back_populates="tasks") process_instance_id: int = db.Column(ForeignKey("process_instance.id"), nullable=False) # find this by looking up the "workflow_name" and "task_spec" from the properties_json From 02c04625f7add36c41daa30e983a260a046f43a8 Mon Sep 17 00:00:00 2001 From: burnettk Date: Thu, 16 Mar 2023 22:59:42 -0400 Subject: [PATCH 040/162] lint --- .../routes/process_instances_controller.py | 14 ++++++++------ .../integration/test_logging_service.py | 14 +++++++------- .../unit/test_process_instance_processor.py | 11 ++++++----- 3 files changed, 21 insertions(+), 18 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index 9be85885..86764e45 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -1,7 +1,5 @@ """APIs for dealing with process groups, process models, and process instances.""" import base64 -from spiffworkflow_backend.models.bpmn_process_definition import BpmnProcessDefinitionModel -from spiffworkflow_backend.models.task_definition import TaskDefinitionModel import json from typing import Any from typing import Dict @@ -21,6 +19,7 @@ from sqlalchemy import and_ from sqlalchemy import or_ from spiffworkflow_backend.exceptions.api_error import ApiError +from spiffworkflow_backend.models.bpmn_process_definition import BpmnProcessDefinitionModel from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.human_task import HumanTaskModel from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel @@ -46,6 +45,7 @@ from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel from spiffworkflow_backend.models.spiff_step_details import 
SpiffStepDetailsModel from spiffworkflow_backend.models.task import Task from spiffworkflow_backend.models.task import TaskModel +from spiffworkflow_backend.models.task_definition import TaskDefinitionModel from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.routes.process_api_blueprint import ( _find_process_instance_by_id_or_raise, @@ -239,14 +239,16 @@ def process_instance_log_list( logs = ( log_query.order_by(TaskModel.end_in_seconds.desc()) # type: ignore .join(TaskDefinitionModel, TaskDefinitionModel.id == TaskModel.task_definition_id) - .join(BpmnProcessDefinitionModel, BpmnProcessDefinitionModel.id == TaskDefinitionModel.bpmn_process_definition_id) + .join( + BpmnProcessDefinitionModel, BpmnProcessDefinitionModel.id == TaskDefinitionModel.bpmn_process_definition_id + ) .outerjoin(HumanTaskModel, HumanTaskModel.task_model_id == TaskModel.id) .outerjoin(UserModel, UserModel.id == HumanTaskModel.completed_by_user_id) .add_columns( - TaskModel.guid.label('spiff_task_guid'), + TaskModel.guid.label("spiff_task_guid"), # type: ignore UserModel.username, - BpmnProcessDefinitionModel.bpmn_identifier.label('bpmn_process_definition_identifier'), - TaskDefinitionModel.bpmn_identifier.label('task_definition_identifier'), + BpmnProcessDefinitionModel.bpmn_identifier.label("bpmn_process_definition_identifier"), # type: ignore + TaskDefinitionModel.bpmn_identifier.label("task_definition_identifier"), # type: ignore ) .paginate(page=page, per_page=per_page, error_out=False) ) diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py index c15b5eeb..df13ceb6 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py @@ -1,17 +1,17 @@ """Test_logging_service.""" from flask.app import Flask -from tests.spiffworkflow_backend.helpers.test_data import load_test_spec -from spiffworkflow_backend.services.authorization_service import AuthorizationService from flask.testing import FlaskClient from tests.spiffworkflow_backend.helpers.base_test import BaseTest +from tests.spiffworkflow_backend.helpers.test_data import load_test_spec from spiffworkflow_backend.models.user import UserModel -from spiffworkflow_backend.services.process_instance_service import ( - ProcessInstanceService, -) +from spiffworkflow_backend.services.authorization_service import AuthorizationService from spiffworkflow_backend.services.process_instance_processor import ( ProcessInstanceProcessor, ) +from spiffworkflow_backend.services.process_instance_service import ( + ProcessInstanceService, +) class TestLoggingService(BaseTest): @@ -71,5 +71,5 @@ class TestLoggingService(BaseTest): ]: assert key in log.keys() - if log['task_definition_identifier'] == 'Activity_SimpleForm': - assert log['username'] == initiator_user.username + if log["task_definition_identifier"] == "Activity_SimpleForm": + assert log["username"] == initiator_user.username diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index ce5355e5..a1cdec22 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py 
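# A sketch of the dictionary one serialized log row is expected to yield once
# the labeled columns above are joined in (values invented; the key set mirrors
# the assertions in the logging test earlier in this patch):
#
#   {
#       "process_instance_id": 1,
#       "start_in_seconds": 1679000000.0,
#       "end_in_seconds": 1679000001.0,
#       "spiff_task_guid": "some-task-guid",
#       "username": "initiator_user",
#       "bpmn_process_definition_identifier": "top_level_process",
#       "task_definition_identifier": "Activity_SimpleForm",
#   }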
@@ -1,7 +1,5 @@ """Test_process_instance_processor.""" from uuid import UUID -from spiffworkflow_backend.models import bpmn_process_definition -from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel import pytest from flask import g @@ -12,6 +10,7 @@ from tests.spiffworkflow_backend.helpers.base_test import BaseTest from tests.spiffworkflow_backend.helpers.test_data import load_test_spec from spiffworkflow_backend.exceptions.api_error import ApiError +from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel from spiffworkflow_backend.models.group import GroupModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus @@ -351,7 +350,9 @@ class TestProcessInstanceProcessor(BaseTest): task_definition = task_model.task_definition assert task_definition.bpmn_identifier == spiff_task_name assert task_definition.bpmn_process_definition.bpmn_identifier == bpmn_process_identifier - message = f"{base_failure_message} Expected: {expected_python_env_data}. Received: {task_model.json_data()}" + message = ( + f"{base_failure_message} Expected: {expected_python_env_data}. Received: {task_model.json_data()}" + ) # TODO: if we split out env data again we will need to use it here instead of json_data # assert task_model.python_env_data() == expected_python_env_data, message assert task_model.json_data() == expected_python_env_data, message @@ -365,13 +366,13 @@ class TestProcessInstanceProcessor(BaseTest): assert_spiff_task_is_in_process("top_level_subprocess_script", "top_level_subprocess") assert_spiff_task_is_in_process("top_level_script", "top_level_process") - if spiff_task.task_spec.name == 'top_level_call_activity': + if spiff_task.task_spec.name == "top_level_call_activity": # the task id / guid of the call activity gets used as the guid of the bpmn process that it calls bpmn_process = BpmnProcessModel.query.filter_by(guid=str(spiff_task.id)).first() assert bpmn_process is not None bpmn_process_definition = bpmn_process.bpmn_process_definition assert bpmn_process_definition is not None - assert bpmn_process_definition.bpmn_identifier == 'test_process_to_call' + assert bpmn_process_definition.bpmn_identifier == "test_process_to_call" assert processor.get_data() == fifth_data_set From a2c451537bf8f1645f2f89b5fc2699a0f5fe623d Mon Sep 17 00:00:00 2001 From: burnettk Date: Thu, 16 Mar 2023 23:10:36 -0400 Subject: [PATCH 041/162] add bin/reorder_python_imports_in_backend script --- bin/reorder_python_imports_in_backend | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100755 bin/reorder_python_imports_in_backend diff --git a/bin/reorder_python_imports_in_backend b/bin/reorder_python_imports_in_backend new file mode 100755 index 00000000..100539ec --- /dev/null +++ b/bin/reorder_python_imports_in_backend @@ -0,0 +1,10 @@ +#!/usr/bin/env bash + +function error_handler() { + >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}." + exit "$2" +} +trap 'error_handler ${LINENO} $?' 
ERR +set -o errtrace -o errexit -o nounset -o pipefail + +poetry run reorder-python-imports --application-directories=spiffworkflow-backend/src $(find spiffworkflow-backend/src -name '*.py' -type f -not -path '*load_database_models.py' -not -path '*/migrations/*') From c91c6f9279cf106f87c32d4cb962c40a2962604d Mon Sep 17 00:00:00 2001 From: burnettk Date: Thu, 16 Mar 2023 23:11:17 -0400 Subject: [PATCH 042/162] comment script --- bin/reorder_python_imports_in_backend | 1 + 1 file changed, 1 insertion(+) diff --git a/bin/reorder_python_imports_in_backend b/bin/reorder_python_imports_in_backend index 100539ec..38595b29 100755 --- a/bin/reorder_python_imports_in_backend +++ b/bin/reorder_python_imports_in_backend @@ -7,4 +7,5 @@ function error_handler() { trap 'error_handler ${LINENO} $?' ERR set -o errtrace -o errexit -o nounset -o pipefail +# this intends to replicate the behavior of the pre-commit hook poetry run reorder-python-imports --application-directories=spiffworkflow-backend/src $(find spiffworkflow-backend/src -name '*.py' -type f -not -path '*load_database_models.py' -not -path '*/migrations/*') From 871c37f134f77cb25954b3ad6d3f115e3e708364 Mon Sep 17 00:00:00 2001 From: burnettk Date: Thu, 16 Mar 2023 23:19:12 -0400 Subject: [PATCH 043/162] bump sphinx_click version --- poetry.lock | 3982 +++++++++++++++++++++++++-------------------------- 1 file changed, 1991 insertions(+), 1991 deletions(-) diff --git a/poetry.lock b/poetry.lock index fb3397a4..b71632be 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,5 +1,3 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. - [[package]] name = "alabaster" version = "0.7.12" @@ -7,10 +5,6 @@ description = "A configurable sidebar-enabled Sphinx theme" category = "main" optional = false python-versions = "*" -files = [ - {file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"}, - {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"}, -] [[package]] name = "alembic" @@ -19,10 +13,6 @@ description = "A database migration tool for SQLAlchemy." category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "alembic-1.8.1-py3-none-any.whl", hash = "sha256:0a024d7f2de88d738d7395ff866997314c837be6104e90c5724350313dee4da4"}, - {file = "alembic-1.8.1.tar.gz", hash = "sha256:cd0b5e45b14b706426b833f06369b9a6d5ee03f826ec3238723ce8caaf6e5ffa"}, -] [package.dependencies] Mako = "*" @@ -38,10 +28,6 @@ description = "Low-level AMQP client for Python (fork of amqplib)." category = "main" optional = false python-versions = ">=3.6" -files = [ - {file = "amqp-5.1.1-py3-none-any.whl", hash = "sha256:6f0956d2c23d8fa6e7691934d8c3930eadb44972cbbd1a7ae3a520f735d43359"}, - {file = "amqp-5.1.1.tar.gz", hash = "sha256:2c1b13fecc0893e946c65cbd5f36427861cffa4ea2201d8f6fca22e2a373b5e2"}, -] [package.dependencies] vine = ">=5.0.0" @@ -53,10 +39,6 @@ description = "A library for parsing ISO 8601 strings." 
category = "main" optional = false python-versions = "*" -files = [ - {file = "aniso8601-9.0.1-py2.py3-none-any.whl", hash = "sha256:1d2b7ef82963909e93c4f24ce48d4de9e66009a21bf1c1e1c85bdd0812fe412f"}, - {file = "aniso8601-9.0.1.tar.gz", hash = "sha256:72e3117667eedf66951bb2d93f4296a56b94b078a8a95905a052611fb3f1b973"}, -] [package.extras] dev = ["black", "coverage", "isort", "pre-commit", "pyenchant", "pylint"] @@ -68,10 +50,6 @@ description = "In-process task scheduler with Cron-like capabilities" category = "main" optional = false python-versions = ">=3.6" -files = [ - {file = "APScheduler-3.10.0-py3-none-any.whl", hash = "sha256:575299f20073c60a2cc9d4fa5906024cdde33c5c0ce6087c4e3c14be3b50fdd4"}, - {file = "APScheduler-3.10.0.tar.gz", hash = "sha256:a49fc23269218416f0e41890eea7a75ed6b284f10630dcfe866ab659621a3696"}, -] [package.dependencies] pytz = "*" @@ -98,10 +76,6 @@ description = "An abstract syntax tree for Python with inference support." category = "main" optional = false python-versions = ">=3.7.2" -files = [ - {file = "astroid-2.12.12-py3-none-any.whl", hash = "sha256:72702205200b2a638358369d90c222d74ebc376787af8fb2f7f2a86f7b5cc85f"}, - {file = "astroid-2.12.12.tar.gz", hash = "sha256:1c00a14f5a3ed0339d38d2e2e5b74ea2591df5861c0936bb292b84ccf3a78d83"}, -] [package.dependencies] lazy-object-proxy = ">=1.4.0" @@ -114,10 +88,6 @@ description = "Classes Without Boilerplate" category = "main" optional = false python-versions = ">=3.5" -files = [ - {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, - {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, -] [package.extras] dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] @@ -132,10 +102,6 @@ description = "Internationalization utilities" category = "main" optional = false python-versions = ">=3.6" -files = [ - {file = "Babel-2.10.3-py3-none-any.whl", hash = "sha256:ff56f4892c1c4bf0d814575ea23471c230d544203c7748e8c68f0089478d48eb"}, - {file = "Babel-2.10.3.tar.gz", hash = "sha256:7614553711ee97490f732126dc077f8d0ae084ebc6a96e23db1482afabdb2c51"}, -] [package.dependencies] pytz = ">=2015.7" @@ -147,10 +113,6 @@ description = "Security oriented static analyser for python code." 
category = "dev" optional = false python-versions = ">=3.7" -files = [ - {file = "bandit-1.7.2-py3-none-any.whl", hash = "sha256:e20402cadfd126d85b68ed4c8862959663c8c372dbbb1fca8f8e2c9f55a067ec"}, - {file = "bandit-1.7.2.tar.gz", hash = "sha256:6d11adea0214a43813887bfe71a377b5a9955e4c826c8ffd341b494e3ab25260"}, -] [package.dependencies] colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} @@ -170,29 +132,6 @@ description = "Modern password hashing for your software and your servers" category = "main" optional = false python-versions = ">=3.6" -files = [ - {file = "bcrypt-4.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0eaa47d4661c326bfc9d08d16debbc4edf78778e6aaba29c1bc7ce67214d4410"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae88eca3024bb34bb3430f964beab71226e761f51b912de5133470b649d82344"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:a522427293d77e1c29e303fc282e2d71864579527a04ddcfda6d4f8396c6c36a"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:fbdaec13c5105f0c4e5c52614d04f0bca5f5af007910daa8b6b12095edaa67b3"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ca3204d00d3cb2dfed07f2d74a25f12fc12f73e606fcaa6975d1f7ae69cacbb2"}, - {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:089098effa1bc35dc055366740a067a2fc76987e8ec75349eb9484061c54f535"}, - {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:e9a51bbfe7e9802b5f3508687758b564069ba937748ad7b9e890086290d2f79e"}, - {file = "bcrypt-4.0.1-cp36-abi3-win32.whl", hash = "sha256:2caffdae059e06ac23fce178d31b4a702f2a3264c20bfb5ff541b338194d8fab"}, - {file = "bcrypt-4.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:8a68f4341daf7522fe8d73874de8906f3a339048ba406be6ddc1b3ccb16fc0d9"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf4fa8b2ca74381bb5442c089350f09a3f17797829d958fad058d6e44d9eb83c"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:67a97e1c405b24f19d08890e7ae0c4f7ce1e56a712a016746c8b2d7732d65d4b"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b3b85202d95dd568efcb35b53936c5e3b3600c7cdcc6115ba461df3a8e89f38d"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbb03eec97496166b704ed663a53680ab57c5084b2fc98ef23291987b525cb7d"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:5ad4d32a28b80c5fa6671ccfb43676e8c1cc232887759d1cd7b6f56ea4355215"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b57adba8a1444faf784394de3436233728a1ecaeb6e07e8c22c8848f179b893c"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:705b2cea8a9ed3d55b4491887ceadb0106acf7c6387699fca771af56b1cdeeda"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:2b3ac11cf45161628f1f3733263e63194f22664bf4d0c0f3ab34099c02134665"}, - {file = 
"bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3100851841186c25f127731b9fa11909ab7b1df6fc4b9f8353f4f1fd952fbf71"}, - {file = "bcrypt-4.0.1.tar.gz", hash = "sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd"}, -] [package.extras] tests = ["pytest (>=3.2.1,!=3.3.0)"] @@ -205,10 +144,6 @@ description = "Screen-scraping library" category = "dev" optional = false python-versions = ">=3.6.0" -files = [ - {file = "beautifulsoup4-4.11.1-py3-none-any.whl", hash = "sha256:58d5c3d29f5a36ffeb94f02f0d786cd53014cf9b3b3951d42e0080d8a9498d30"}, - {file = "beautifulsoup4-4.11.1.tar.gz", hash = "sha256:ad9aa55b65ef2808eb405f46cf74df7fcb7044d5cbc26487f96eb2ef2e436693"}, -] [package.dependencies] soupsieve = ">1.2" @@ -224,10 +159,6 @@ description = "Python multiprocessing fork with improvements and bugfixes" category = "main" optional = false python-versions = "*" -files = [ - {file = "billiard-3.6.4.0-py3-none-any.whl", hash = "sha256:87103ea78fa6ab4d5c751c4909bcff74617d985de7fa8b672cf8618afd5a875b"}, - {file = "billiard-3.6.4.0.tar.gz", hash = "sha256:299de5a8da28a783d51b197d496bef4f1595dd023a93a4f59dde1886ae905547"}, -] [[package]] name = "black" @@ -236,20 +167,6 @@ description = "The uncompromising code formatter." category = "dev" optional = false python-versions = ">=3.7" -files = [ - {file = "black-23.1a1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fb7641d442ede92538bc70fa0201f884753a7d0f62f26c722b7b00301b95902"}, - {file = "black-23.1a1-cp310-cp310-win_amd64.whl", hash = "sha256:88288a645402106b8eb9f50d7340ae741e16240bb01c2eed8466549153daa96e"}, - {file = "black-23.1a1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4db1d8027ce7ae53f0ccf02b0be0b8808fefb291d6cb1543420f4165d96d364c"}, - {file = "black-23.1a1-cp311-cp311-win_amd64.whl", hash = "sha256:88ec25a64063945b4591b6378bead544c5d3260de1c93ad96f3ad2d76ddd76fd"}, - {file = "black-23.1a1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dff6f0157e47fbbeada046fca144b6557d3be2fb2602d668881cd179f04a352"}, - {file = "black-23.1a1-cp37-cp37m-win_amd64.whl", hash = "sha256:ca658b69260a18bf7aa0b0a6562dbbd304a737487d1318998aaca5a75901fd2c"}, - {file = "black-23.1a1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85dede655442f5e246e7abd667fe07e14916897ba52f3640b5489bf11f7dbf67"}, - {file = "black-23.1a1-cp38-cp38-win_amd64.whl", hash = "sha256:ddbf9da228726d46f45c29024263e160d41030a415097254817d65127012d1a2"}, - {file = "black-23.1a1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63330069d8ec909cf4e2c4d43a7f00aeb03335430ef9fec6cd2328e6ebde8a77"}, - {file = "black-23.1a1-cp39-cp39-win_amd64.whl", hash = "sha256:793c9176beb2adf295f6b863d9a4dc953fe2ac359ca3da108d71d14cb2c09e52"}, - {file = "black-23.1a1-py3-none-any.whl", hash = "sha256:e88e4b633d64b9e7adc4a6b922f52bb204af9f90d7b1e3317e6490f2b598b1ea"}, - {file = "black-23.1a1.tar.gz", hash = "sha256:0b945a5a1e5a5321f884de0061d5a8585d947c9b608e37b6d26ceee4dfdf4b62"}, -] [package.dependencies] click = ">=8.0.0" @@ -270,10 +187,6 @@ description = "Fast, simple object-to-object and broadcast signaling" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "blinker-1.5-py2.py3-none-any.whl", hash = "sha256:1eb563df6fdbc39eeddc177d953203f99f097e9bf0e2b8f9f3cf18b6ca425e36"}, - {file = "blinker-1.5.tar.gz", hash = 
"sha256:923e5e2f69c155f2cc42dafbbd70e16e3fde24d2d4aa2ab72fbe386238892462"}, -] [[package]] name = "celery" @@ -282,10 +195,6 @@ description = "Distributed Task Queue." category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "celery-5.2.7-py3-none-any.whl", hash = "sha256:138420c020cd58d6707e6257b6beda91fd39af7afde5d36c6334d175302c0e14"}, - {file = "celery-5.2.7.tar.gz", hash = "sha256:fafbd82934d30f8a004f81e8f7a062e31413a23d444be8ee3326553915958c6d"}, -] [package.dependencies] billiard = ">=3.6.4.0,<4.0" @@ -337,10 +246,6 @@ description = "Python package for providing Mozilla's CA Bundle." category = "main" optional = false python-versions = ">=3.6" -files = [ - {file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"}, - {file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"}, -] [[package]] name = "cfgv" @@ -349,10 +254,6 @@ description = "Validate configuration and produce human readable error messages. category = "dev" optional = false python-versions = ">=3.6.1" -files = [ - {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"}, - {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, -] [[package]] name = "charset-normalizer" @@ -361,10 +262,6 @@ description = "The Real First Universal Charset Detector. Open, modern and activ category = "main" optional = false python-versions = ">=3.6.0" -files = [ - {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, - {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, -] [package.extras] unicode-backport = ["unicodedata2"] @@ -376,10 +273,6 @@ description = "Utilities for refactoring imports in python-like syntax." 
category = "dev" optional = false python-versions = ">=3.7" -files = [ - {file = "classify_imports-4.2.0-py2.py3-none-any.whl", hash = "sha256:dbbc264b70a470ed8c6c95976a11dfb8b7f63df44ed1af87328bbed2663f5161"}, - {file = "classify_imports-4.2.0.tar.gz", hash = "sha256:7abfb7ea92149b29d046bd34573d247ba6e68cc28100c801eba4af17964fc40e"}, -] [[package]] name = "click" @@ -388,10 +281,6 @@ description = "Composable command line interface toolkit" category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, - {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, -] [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} @@ -403,10 +292,6 @@ description = "Enables git-like *did-you-mean* feature in click" category = "main" optional = false python-versions = ">=3.6.2,<4.0.0" -files = [ - {file = "click-didyoumean-0.3.0.tar.gz", hash = "sha256:f184f0d851d96b6d29297354ed981b7dd71df7ff500d82fa6d11f0856bee8035"}, - {file = "click_didyoumean-0.3.0-py3-none-any.whl", hash = "sha256:a0713dc7a1de3f06bc0df5a9567ad19ead2d3d5689b434768a6145bff77c0667"}, -] [package.dependencies] click = ">=7" @@ -418,10 +303,6 @@ description = "An extension module for click to enable registering CLI commands category = "main" optional = false python-versions = "*" -files = [ - {file = "click-plugins-1.1.1.tar.gz", hash = "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b"}, - {file = "click_plugins-1.1.1-py2.py3-none-any.whl", hash = "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8"}, -] [package.dependencies] click = ">=4.0" @@ -436,10 +317,6 @@ description = "REPL plugin for Click" category = "main" optional = false python-versions = "*" -files = [ - {file = "click-repl-0.2.0.tar.gz", hash = "sha256:cd12f68d745bf6151210790540b4cb064c7b13e571bc64b6957d98d120dacfd8"}, - {file = "click_repl-0.2.0-py3-none-any.whl", hash = "sha256:94b3fbbc9406a236f176e0506524b2937e4b23b6f4c0c0b2a0a83f8a64e9194b"}, -] [package.dependencies] click = "*" @@ -453,10 +330,6 @@ description = "Click utility functions" category = "main" optional = false python-versions = "*" -files = [ - {file = "clickclick-20.10.2-py2.py3-none-any.whl", hash = "sha256:c8f33e6d9ec83f68416dd2136a7950125bd256ec39ccc9a85c6e280a16be2bb5"}, - {file = "clickclick-20.10.2.tar.gz", hash = "sha256:4efb13e62353e34c5eef7ed6582c4920b418d7dedc86d819e22ee089ba01802c"}, -] [package.dependencies] click = ">=4.0" @@ -469,10 +342,6 @@ description = "Cross-platform colored terminal text." category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] [[package]] name = "configparser" @@ -481,10 +350,6 @@ description = "Updated configparser from stdlib for earlier Pythons." 
category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "configparser-5.3.0-py3-none-any.whl", hash = "sha256:b065779fd93c6bf4cee42202fa4351b4bb842e96a3fb469440e484517a49b9fa"}, - {file = "configparser-5.3.0.tar.gz", hash = "sha256:8be267824b541c09b08db124917f48ab525a6c3e837011f3130781a224c57090"}, -] [package.extras] docs = ["jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx"] @@ -497,10 +362,6 @@ description = "Connexion - API first applications with OpenAPI/Swagger and Flask category = "main" optional = false python-versions = ">=3.6" -files = [ - {file = "connexion-2.14.1-py2.py3-none-any.whl", hash = "sha256:f343717241b4c4802a694c38fee66fb1693c897fe4ea5a957fa9b3b07caf6394"}, - {file = "connexion-2.14.1.tar.gz", hash = "sha256:99aa5781e70a7b94f8ffae8cf89f309d49cdb811bbd65a8e2f2546f3b19a01e6"}, -] [package.dependencies] clickclick = ">=1.2,<21" @@ -528,7 +389,1860 @@ description = "Code coverage measurement for Python" category = "dev" optional = false python-versions = ">=3.7" -files = [ + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "darglint" +version = "1.8.1" +description = "A utility for ensuring Google-style docstrings stay up to date with the source code." +category = "dev" +optional = false +python-versions = ">=3.6,<4.0" + +[[package]] +name = "distlib" +version = "0.3.6" +description = "Distribution utilities" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "docutils" +version = "0.19" +description = "Docutils -- Python Documentation Utilities" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "dparse" +version = "0.6.2" +description = "A parser for Python dependency files" +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +packaging = "*" +toml = "*" + +[package.extras] +conda = ["pyyaml"] +pipenv = ["pipenv"] + +[[package]] +name = "ecdsa" +version = "0.18.0" +description = "ECDSA cryptographic signature library (pure python)" +category = "main" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + +[package.dependencies] +six = ">=1.9.0" + +[package.extras] +gmpy = ["gmpy"] +gmpy2 = ["gmpy2"] + +[[package]] +name = "filelock" +version = "3.8.0" +description = "A platform independent file lock." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo (>=2022.6.21)", "sphinx (>=5.1.1)", "sphinx-autodoc-typehints (>=1.19.1)"] +testing = ["covdefaults (>=2.2)", "coverage (>=6.4.2)", "pytest (>=7.1.2)", "pytest-cov (>=3)", "pytest-timeout (>=2.1)"] + +[[package]] +name = "flake8" +version = "4.0.1" +description = "the modular source code checker: pep8 pyflakes and co" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +mccabe = ">=0.6.0,<0.7.0" +pycodestyle = ">=2.8.0,<2.9.0" +pyflakes = ">=2.4.0,<2.5.0" + +[[package]] +name = "flake8-bandit" +version = "2.1.2" +description = "Automated security testing with bandit and flake8." +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +bandit = "*" +flake8 = "*" +flake8-polyfill = "*" +pycodestyle = "*" + +[[package]] +name = "flake8-bugbear" +version = "22.10.27" +description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +attrs = ">=19.2.0" +flake8 = ">=3.0.0" + +[package.extras] +dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "tox"] + +[[package]] +name = "flake8-docstrings" +version = "1.6.0" +description = "Extension for flake8 which uses pydocstyle to check docstrings" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +flake8 = ">=3" +pydocstyle = ">=2.1" + +[[package]] +name = "flake8-polyfill" +version = "1.0.2" +description = "Polyfill package for Flake8 plugins" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +flake8 = "*" + +[[package]] +name = "flake8-rst-docstrings" +version = "0.2.7" +description = "Python docstring reStructuredText (RST) validator" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +flake8 = ">=3.0.0" +pygments = "*" +restructuredtext-lint = "*" + +[[package]] +name = "flask" +version = "2.2.2" +description = "A simple framework for building complex web applications." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +click = ">=8.0" +itsdangerous = ">=2.0" +Jinja2 = ">=3.0" +Werkzeug = ">=2.2.2" + +[package.extras] +async = ["asgiref (>=3.2)"] +dotenv = ["python-dotenv"] + +[[package]] +name = "flask-admin" +version = "1.6.0" +description = "Simple and extensible admin interface framework for Flask" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +Flask = ">=0.7" +wtforms = "*" + +[package.extras] +aws = ["boto"] +azure = ["azure-storage-blob"] + +[[package]] +name = "flask-bcrypt" +version = "1.0.1" +description = "Brcrypt hashing for Flask." 
+category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +bcrypt = ">=3.1.1" +Flask = "*" + +[[package]] +name = "flask-bpmn" +version = "0.0.0" +description = "Flask Bpmn" +category = "main" +optional = false +python-versions = "^3.7" +develop = false + +[package.dependencies] +click = "^8.0.1" +flask = "*" +flask-admin = "*" +flask-bcrypt = "*" +flask-cors = "*" +flask-mail = "*" +flask-marshmallow = "*" +flask-migrate = "*" +flask-restful = "*" +greenlet = "^2.0.1" +sentry-sdk = "*" +sphinx-autoapi = "^2.0.0" +spiffworkflow = "*" +werkzeug = "*" + +[package.source] +type = "git" +url = "https://github.com/sartography/flask-bpmn" +reference = "main" +resolved_reference = "c18306300f4312b8d36e0197fd6b62399180d0b1" + +[[package]] +name = "flask-cors" +version = "3.0.10" +description = "A Flask extension adding a decorator for CORS support" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +Flask = ">=0.9" +Six = "*" + +[[package]] +name = "flask-mail" +version = "0.9.1" +description = "Flask extension for sending email" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +blinker = "*" +Flask = "*" + +[[package]] +name = "flask-marshmallow" +version = "0.14.0" +description = "Flask + marshmallow for beautiful APIs" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +Flask = "*" +marshmallow = ">=2.0.0" +six = ">=1.9.0" + +[package.extras] +dev = ["flake8 (==3.8.3)", "flake8-bugbear (==20.1.4)", "flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmallow-sqlalchemy (>=0.13.0,<0.19.0)", "mock", "pre-commit (>=2.4,<3.0)", "pytest", "tox"] +docs = ["Sphinx (==3.2.1)", "marshmallow-sqlalchemy (>=0.13.0)", "sphinx-issues (==1.2.0)"] +lint = ["flake8 (==3.8.3)", "flake8-bugbear (==20.1.4)", "pre-commit (>=2.4,<3.0)"] +sqlalchemy = ["flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmallow-sqlalchemy (>=0.13.0,<0.19.0)"] +tests = ["flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmallow-sqlalchemy (>=0.13.0,<0.19.0)", "mock", "pytest"] + +[[package]] +name = "flask-migrate" +version = "3.1.0" +description = "SQLAlchemy database migrations for Flask applications using Alembic." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +alembic = ">=0.7" +Flask = ">=0.9" +Flask-SQLAlchemy = ">=1.0" + +[[package]] +name = "flask-restful" +version = "0.3.9" +description = "Simple framework for creating REST APIs" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +aniso8601 = ">=0.82" +Flask = ">=0.8" +pytz = "*" +six = ">=1.3.0" + +[package.extras] +docs = ["sphinx"] + +[[package]] +name = "flask-sqlalchemy" +version = "3.0.2" +description = "Add SQLAlchemy support to your Flask application." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +Flask = ">=2.2" +SQLAlchemy = ">=1.4.18" + +[[package]] +name = "furo" +version = "2022.9.29" +description = "A clean customisable Sphinx documentation theme." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +beautifulsoup4 = "*" +pygments = ">=2.7" +sphinx = ">=4.0,<6.0" +sphinx-basic-ng = "*" + +[[package]] +name = "gitdb" +version = "4.0.9" +description = "Git Object Database" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +smmap = ">=3.0.1,<6" + +[[package]] +name = "gitpython" +version = "3.1.29" +description = "GitPython is a python library used to interact with Git repositories" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +gitdb = ">=4.0.1,<5" + +[[package]] +name = "greenlet" +version = "2.0.1" +description = "Lightweight in-process concurrent programming" +category = "main" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" + +[package.extras] +docs = ["Sphinx", "docutils (<0.18)"] +test = ["faulthandler", "objgraph", "psutil"] + +[[package]] +name = "gunicorn" +version = "20.1.0" +description = "WSGI HTTP Server for UNIX" +category = "main" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +setuptools = ">=3.0" + +[package.extras] +eventlet = ["eventlet (>=0.24.1)"] +gevent = ["gevent (>=1.4.0)"] +setproctitle = ["setproctitle"] +tornado = ["tornado (>=0.2)"] + +[[package]] +name = "identify" +version = "2.5.7" +description = "File identification library for Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +license = ["ukkonen"] + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "imagesize" +version = "1.4.1" +description = "Getting image size from png/jpeg/jpeg2000/gif file" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "inflection" +version = "0.5.1" +description = "A port of Ruby on Rails inflector to Python" +category = "main" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "iniconfig" +version = "1.1.1" +description = "iniconfig: brain-dead simple config-ini parsing" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "itsdangerous" +version = "2.1.2" +description = "Safely pass data to untrusted environments and back." +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "jinja2" +version = "3.1.2" +description = "A very fast and expressive template engine." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jsonschema" +version = "4.16.0" +description = "An implementation of JSON Schema validation for Python" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" + +[package.extras] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] + +[[package]] +name = "kombu" +version = "5.2.4" +description = "Messaging library for Python." 
+category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +amqp = ">=5.0.9,<6.0.0" +vine = "*" + +[package.extras] +azureservicebus = ["azure-servicebus (>=7.0.0)"] +azurestoragequeues = ["azure-storage-queue"] +consul = ["python-consul (>=0.6.0)"] +librabbitmq = ["librabbitmq (>=2.0.0)"] +mongodb = ["pymongo (>=3.3.0,<3.12.1)"] +msgpack = ["msgpack"] +pyro = ["pyro4"] +qpid = ["qpid-python (>=0.26)", "qpid-tools (>=0.26)"] +redis = ["redis (>=3.4.1,!=4.0.0,!=4.0.1)"] +slmq = ["softlayer-messaging (>=1.0.3)"] +sqlalchemy = ["sqlalchemy"] +sqs = ["boto3 (>=1.9.12)", "pycurl (>=7.44.1,<7.45.0)", "urllib3 (>=1.26.7)"] +yaml = ["PyYAML (>=3.10)"] +zookeeper = ["kazoo (>=1.3.1)"] + +[[package]] +name = "lazy-object-proxy" +version = "1.8.0" +description = "A fast and thorough lazy object proxy." +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "livereload" +version = "2.6.3" +description = "Python LiveReload is an awesome tool for web developers" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +six = "*" +tornado = {version = "*", markers = "python_version > \"2.7\""} + +[[package]] +name = "lxml" +version = "4.9.1" +description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" + +[package.extras] +cssselect = ["cssselect (>=0.7)"] +html5 = ["html5lib"] +htmlsoup = ["BeautifulSoup4"] +source = ["Cython (>=0.29.7)"] + +[[package]] +name = "mako" +version = "1.2.3" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +MarkupSafe = ">=0.9.2" + +[package.extras] +babel = ["Babel"] +lingua = ["lingua"] +testing = ["pytest"] + +[[package]] +name = "markupsafe" +version = "2.1.1" +description = "Safely add untrusted strings to HTML/XML markup." +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "marshmallow" +version = "3.18.0" +description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
+category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +packaging = ">=17.0" + +[package.extras] +dev = ["flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "mypy (==0.971)", "pre-commit (>=2.4,<3.0)", "pytest", "pytz", "simplejson", "tox"] +docs = ["alabaster (==0.7.12)", "autodocsumm (==0.2.9)", "sphinx (==5.1.1)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"] +lint = ["flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "mypy (==0.971)", "pre-commit (>=2.4,<3.0)"] +tests = ["pytest", "pytz", "simplejson"] + +[[package]] +name = "marshmallow-enum" +version = "1.5.1" +description = "Enum field for Marshmallow" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +marshmallow = ">=2.0.0" + +[[package]] +name = "marshmallow-sqlalchemy" +version = "0.28.1" +description = "SQLAlchemy integration with the marshmallow (de)serialization library" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +marshmallow = ">=3.0.0" +packaging = ">=21.3" +SQLAlchemy = ">=1.3.0" + +[package.extras] +dev = ["flake8 (==4.0.1)", "flake8-bugbear (==22.7.1)", "pre-commit (>=2.0,<3.0)", "pytest", "pytest-lazy-fixture (>=0.6.2)", "tox"] +docs = ["alabaster (==0.7.12)", "sphinx (==4.4.0)", "sphinx-issues (==3.0.1)"] +lint = ["flake8 (==4.0.1)", "flake8-bugbear (==22.7.1)", "pre-commit (>=2.0,<3.0)"] +tests = ["pytest", "pytest-lazy-fixture (>=0.6.2)"] + +[[package]] +name = "mccabe" +version = "0.6.1" +description = "McCabe checker, plugin for flake8" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "mypy" +version = "0.982" +description = "Optional static typing for Python" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +mypy-extensions = ">=0.4.3" +typing-extensions = ">=3.10" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +python2 = ["typed-ast (>=1.4.0,<2)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "0.4.3" +description = "Experimental type system extensions for programs checked with the mypy typechecker." +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "mysql-connector-python" +version = "8.0.31" +description = "MySQL driver written in Python" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +protobuf = ">=3.11.0,<=3.20.1" + +[package.extras] +compression = ["lz4 (>=2.1.6,<=3.1.3)", "zstandard (>=0.12.0,<=0.15.2)"] +dns-srv = ["dnspython (>=1.16.0,<=2.1.0)"] +gssapi = ["gssapi (>=1.6.9,<=1.8.1)"] + +[[package]] +name = "nodeenv" +version = "1.7.0" +description = "Node.js virtual environment builder" +category = "dev" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" + +[package.dependencies] +setuptools = "*" + +[[package]] +name = "packaging" +version = "21.3" +description = "Core utilities for Python packages" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" + +[[package]] +name = "pathspec" +version = "0.10.1" +description = "Utility library for gitignore style pattern matching of file paths." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "pbr" +version = "5.11.0" +description = "Python Build Reasonableness" +category = "dev" +optional = false +python-versions = ">=2.6" + +[[package]] +name = "pep8-naming" +version = "0.13.2" +description = "Check PEP-8 naming conventions, plugin for flake8" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +flake8 = ">=3.9.1" + +[[package]] +name = "platformdirs" +version = "2.5.2" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx (>=4)", "sphinx-autodoc-typehints (>=1.12)"] +test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] + +[[package]] +name = "pluggy" +version = "1.0.0" +description = "plugin and hook calling mechanisms for python" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pre-commit" +version = "2.20.0" +description = "A framework for managing and maintaining multi-language pre-commit hooks." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +toml = "*" +virtualenv = ">=20.0.8" + +[[package]] +name = "pre-commit-hooks" +version = "4.3.0" +description = "Some out-of-the-box hooks for pre-commit." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +"ruamel.yaml" = ">=0.15" + +[[package]] +name = "prompt-toolkit" +version = "3.0.31" +description = "Library for building powerful interactive command lines in Python" +category = "main" +optional = false +python-versions = ">=3.6.2" + +[package.dependencies] +wcwidth = "*" + +[[package]] +name = "protobuf" +version = "3.20.1" +description = "Protocol Buffers" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "psycopg2" +version = "2.9.5" +description = "psycopg2 - Python-PostgreSQL Database Adapter" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "pyasn1" +version = "0.4.8" +description = "ASN.1 types and codecs" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "pycodestyle" +version = "2.8.0" +description = "Python style guide checker" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "pydocstyle" +version = "6.1.1" +description = "Python docstring style checker" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +snowballstemmer = "*" + +[package.extras] +toml = ["toml"] + +[[package]] +name = "pyflakes" +version = "2.4.0" +description = "passive checker of Python programs" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "pygments" +version = "2.13.0" +description = "Pygments is a syntax highlighting package written in Python." 
+category = "main" +optional = false +python-versions = ">=3.6" + +[package.extras] +plugins = ["importlib-metadata"] + +[[package]] +name = "pyjwt" +version = "2.6.0" +description = "JSON Web Token implementation in Python" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +crypto = ["cryptography (>=3.4.0)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] + +[[package]] +name = "pyparsing" +version = "3.0.9" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +category = "main" +optional = false +python-versions = ">=3.6.8" + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + +[[package]] +name = "pyrsistent" +version = "0.18.1" +description = "Persistent/Functional/Immutable data structures" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "pytest" +version = "7.2.0" +description = "pytest: simple powerful testing with Python" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] + +[[package]] +name = "pytest-flask" +version = "1.2.0" +description = "A set of py.test fixtures to test Flask applications." +category = "main" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +Flask = "*" +pytest = ">=5.2" +Werkzeug = ">=0.7" + +[package.extras] +docs = ["Sphinx", "sphinx-rtd-theme"] + +[[package]] +name = "pytest-flask-sqlalchemy" +version = "1.1.0" +description = "A pytest plugin for preserving test isolation in Flask-SQlAlchemy using database transactions." +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +Flask-SQLAlchemy = ">=2.3" +packaging = ">=14.1" +pytest = ">=3.2.1" +pytest-mock = ">=1.6.2" +SQLAlchemy = ">=1.2.2" + +[package.extras] +tests = ["psycopg2-binary", "pytest (>=6.0.1)", "pytest-postgresql (>=2.4.0,<4.0.0)"] + +[[package]] +name = "pytest-mock" +version = "3.10.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +pytest = ">=5.0" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "python-jose" +version = "3.3.0" +description = "JOSE implementation in Python" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +ecdsa = "!=0.15" +pyasn1 = "*" +rsa = "*" + +[package.extras] +cryptography = ["cryptography (>=3.4.0)"] +pycrypto = ["pyasn1", "pycrypto (>=2.6.0,<2.7.0)"] +pycryptodome = ["pyasn1", "pycryptodome (>=3.3.1,<4.0.0)"] + +[[package]] +name = "python-keycloak" +version = "2.6.0" +description = "python-keycloak is a Python package providing access to the Keycloak API." 
+category = "main" +optional = false +python-versions = ">=3.7,<4.0" + +[package.dependencies] +python-jose = ">=3.3.0,<4.0.0" +requests = ">=2.20.0,<3.0.0" +requests-toolbelt = ">=0.9.1,<0.10.0" +urllib3 = ">=1.26.0,<2.0.0" + +[package.extras] +docs = ["Sphinx (>=5.0.2,<6.0.0)", "alabaster (>=0.7.12,<0.8.0)", "commonmark (>=0.9.1,<0.10.0)", "m2r2 (>=0.3.2,<0.4.0)", "mock (>=4.0.3,<5.0.0)", "readthedocs-sphinx-ext (>=2.1.8,<3.0.0)", "recommonmark (>=0.7.1,<0.8.0)", "sphinx-autoapi (>=1.8.4,<2.0.0)", "sphinx-rtd-theme (>=1.0.0,<2.0.0)"] + +[[package]] +name = "pytz" +version = "2022.5" +description = "World timezone definitions, modern and historical" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "pytz-deprecation-shim" +version = "0.1.0.post0" +description = "Shims to make deprecation of pytz easier" +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" + +[package.dependencies] +tzdata = {version = "*", markers = "python_version >= \"3.6\""} + +[[package]] +name = "pyupgrade" +version = "3.1.0" +description = "A tool to automatically upgrade syntax for newer versions." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +tokenize-rt = ">=3.2.0" + +[[package]] +name = "pyyaml" +version = "6.0" +description = "YAML parser and emitter for Python" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "reorder-python-imports" +version = "3.9.0" +description = "Tool for reordering python imports" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +classify-imports = ">=4.1" + +[[package]] +name = "requests" +version = "2.28.1" +description = "Python HTTP for Humans." +category = "main" +optional = false +python-versions = ">=3.7, <4" + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<3" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<1.27" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-toolbelt" +version = "0.9.1" +description = "A utility belt for advanced users of python-requests" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +requests = ">=2.0.1,<3.0.0" + +[[package]] +name = "restrictedpython" +version = "6.0" +description = "RestrictedPython is a defined subset of the Python language which allows to provide a program input into a trusted environment." 
+category = "main" +optional = false +python-versions = ">=3.6, <3.12" + +[package.extras] +docs = ["Sphinx", "sphinx-rtd-theme"] +test = ["pytest", "pytest-mock"] + +[[package]] +name = "restructuredtext-lint" +version = "1.4.0" +description = "reStructuredText linter" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +docutils = ">=0.11,<1.0" + +[[package]] +name = "rsa" +version = "4.9" +description = "Pure-Python RSA implementation" +category = "main" +optional = false +python-versions = ">=3.6,<4" + +[package.dependencies] +pyasn1 = ">=0.1.3" + +[[package]] +name = "ruamel-yaml" +version = "0.17.21" +description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" +category = "dev" +optional = false +python-versions = ">=3" + +[package.extras] +docs = ["ryd"] +jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] + +[[package]] +name = "safety" +version = "2.3.1" +description = "Checks installed dependencies for known vulnerabilities and licenses." +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +Click = ">=8.0.2" +dparse = ">=0.6.2" +packaging = ">=21.0" +requests = "*" +"ruamel.yaml" = ">=0.17.21" +setuptools = ">=19.3" + +[package.extras] +github = ["jinja2 (>=3.1.0)", "pygithub (>=1.43.3)"] +gitlab = ["python-gitlab (>=1.3.0)"] + +[[package]] +name = "sentry-sdk" +version = "1.10.1" +description = "Python client for Sentry (https://sentry.io)" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +certifi = "*" +urllib3 = {version = ">=1.26.11", markers = "python_version >= \"3.6\""} + +[package.extras] +aiohttp = ["aiohttp (>=3.5)"] +beam = ["apache-beam (>=2.12)"] +bottle = ["bottle (>=0.12.13)"] +celery = ["celery (>=3)"] +chalice = ["chalice (>=1.16.0)"] +django = ["django (>=1.8)"] +falcon = ["falcon (>=1.4)"] +fastapi = ["fastapi (>=0.79.0)"] +flask = ["blinker (>=1.1)", "flask (>=0.11)"] +httpx = ["httpx (>=0.16.0)"] +pure-eval = ["asttokens", "executing", "pure-eval"] +pyspark = ["pyspark (>=2.4.4)"] +quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] +rq = ["rq (>=0.6)"] +sanic = ["sanic (>=0.8)"] +sqlalchemy = ["sqlalchemy (>=1.2)"] +starlette = ["starlette (>=0.19.1)"] +tornado = ["tornado (>=5)"] + +[[package]] +name = "setuptools" +version = "65.5.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mock", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "simplejson" +version = "3.17.6" +description = 
"Simple, fast, extensible JSON encoder/decoder for Python" +category = "main" +optional = false +python-versions = ">=2.5, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "smmap" +version = "5.0.0" +description = "A pure Python implementation of a sliding window memory map manager" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "soupsieve" +version = "2.3.2.post1" +description = "A modern CSS selector implementation for Beautiful Soup." +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "sphinx" +version = "5.3.0" +description = "Python documentation generator" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +alabaster = ">=0.7,<0.8" +babel = ">=2.9" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +docutils = ">=0.14,<0.20" +imagesize = ">=1.3" +Jinja2 = ">=3.0" +packaging = ">=21.0" +Pygments = ">=2.12" +requests = ">=2.5.0" +snowballstemmer = ">=2.0" +sphinxcontrib-applehelp = "*" +sphinxcontrib-devhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" +sphinxcontrib-jsmath = "*" +sphinxcontrib-qthelp = "*" +sphinxcontrib-serializinghtml = ">=1.1.5" + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-bugbear", "flake8-comprehensions", "flake8-simplify", "isort", "mypy (>=0.981)", "sphinx-lint", "types-requests", "types-typed-ast"] +test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"] + +[[package]] +name = "sphinx-autoapi" +version = "2.0.0" +description = "Sphinx API documentation generator" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +astroid = ">=2.7" +Jinja2 = "*" +PyYAML = "*" +sphinx = ">=4.0" +unidecode = "*" + +[package.extras] +docs = ["sphinx", "sphinx-rtd-theme"] +dotnet = ["sphinxcontrib-dotnetdomain"] +go = ["sphinxcontrib-golangdomain"] + +[[package]] +name = "sphinx-autobuild" +version = "2021.3.14" +description = "Rebuild Sphinx documentation on changes, with live-reload in the browser." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +colorama = "*" +livereload = "*" +sphinx = "*" + +[package.extras] +test = ["pytest", "pytest-cov"] + +[[package]] +name = "sphinx-basic-ng" +version = "1.0.0b1" +description = "A modern skeleton for Sphinx themes." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +sphinx = ">=4.0" + +[package.extras] +docs = ["furo", "ipython", "myst-parser", "sphinx-copybutton", "sphinx-inline-tabs"] + +[[package]] +name = "sphinx-click" +version = "4.4.0" +description = "Sphinx extension that automatically documents click applications" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +click = ">=7.0" +docutils = "*" +sphinx = ">=2.0" + +[[package]] +name = "sphinxcontrib-applehelp" +version = "1.0.2" +description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" +category = "main" +optional = false +python-versions = ">=3.5" + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "1.0.2" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." +category = "main" +optional = false +python-versions = ">=3.5" + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.0.0" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["html5lib", "pytest"] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +description = "A sphinx extension which renders display math in HTML via JavaScript" +category = "main" +optional = false +python-versions = ">=3.5" + +[package.extras] +test = ["flake8", "mypy", "pytest"] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "1.0.3" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." +category = "main" +optional = false +python-versions = ">=3.5" + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "1.1.5" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." 
+category = "main" +optional = false +python-versions = ">=3.5" + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "SpiffWorkflow" +version = "1.2.1" +description = "A workflow framework and BPMN/DMN Processor" +category = "main" +optional = false +python-versions = "*" +develop = false + +[package.dependencies] +celery = "*" +configparser = "*" +lxml = "*" + +[package.source] +type = "git" +url = "https://github.com/sartography/SpiffWorkflow" +reference = "main" +resolved_reference = "f162aac43af3af18d1a55186aeccea154fb8b05d" + +[[package]] +name = "sqlalchemy" +version = "1.4.42" +description = "Database Abstraction Library" +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} + +[package.extras] +aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] +mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +pymysql = ["pymysql", "pymysql (<1)"] +sqlcipher = ["sqlcipher3_binary"] + +[[package]] +name = "sqlalchemy-stubs" +version = "0.4" +description = "" +category = "main" +optional = false +python-versions = "*" +develop = false + +[package.dependencies] +mypy = ">=0.790" +typing-extensions = ">=3.7.4" + +[package.source] +type = "git" +url = "https://github.com/burnettk/sqlalchemy-stubs.git" +reference = "scoped-session-delete" +resolved_reference = "d1176931684ce5b327539cc9567d4a1cd8ef1efd" + +[[package]] +name = "stevedore" +version = "4.1.0" +description = "Manage dynamic plugins for Python applications" +category = "dev" +optional = false +python-versions = ">=3.8" + +[package.dependencies] +pbr = ">=2.0.0,<2.1.0 || >2.1.0" + +[[package]] +name = "swagger-ui-bundle" +version = "0.0.9" +description = "swagger_ui_bundle - swagger-ui files in a pip package" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +Jinja2 = ">=2.0" + +[[package]] +name = "tokenize-rt" +version = "5.0.0" +description = "A wrapper around the stdlib `tokenize` which roundtrips." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +category = "dev" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "tornado" +version = "6.2" +description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." +category = "dev" +optional = false +python-versions = ">= 3.7" + +[[package]] +name = "typeguard" +version = "2.13.3" +description = "Run-time type checker for Python" +category = "dev" +optional = false +python-versions = ">=3.5.3" + +[package.extras] +doc = ["sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["mypy", "pytest", "typing-extensions"] + +[[package]] +name = "types-click" +version = "7.1.8" +description = "Typing stubs for click" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "types-flask" +version = "1.1.6" +description = "Typing stubs for Flask" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +types-click = "*" +types-Jinja2 = "*" +types-Werkzeug = "*" + +[[package]] +name = "types-jinja2" +version = "2.11.9" +description = "Typing stubs for Jinja2" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +types-MarkupSafe = "*" + +[[package]] +name = "types-markupsafe" +version = "1.1.10" +description = "Typing stubs for MarkupSafe" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "types-pytz" +version = "2022.5.0.0" +description = "Typing stubs for pytz" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "types-pyyaml" +version = "6.0.12.1" +description = "Typing stubs for PyYAML" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "types-requests" +version = "2.28.11.2" +description = "Typing stubs for requests" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +types-urllib3 = "<1.27" + +[[package]] +name = "types-urllib3" +version = "1.26.25.1" +description = "Typing stubs for urllib3" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "types-werkzeug" +version = "1.0.9" +description = "Typing stubs for Werkzeug" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "typing-extensions" +version = "4.4.0" +description = "Backported and Experimental Type Hints for Python 3.7+" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "tzdata" +version = "2022.5" +description = "Provider of IANA time zone data" +category = "main" +optional = false +python-versions = ">=2" + +[[package]] +name = "tzlocal" +version = "4.2" +description = "tzinfo object for the local timezone" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pytz-deprecation-shim = "*" +tzdata = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +devenv = ["black", "pyroma", "pytest-cov", "zest.releaser"] +test = ["pytest (>=4.3)", "pytest-mock (>=3.3)"] + +[[package]] +name = "unidecode" +version = "1.3.6" +description = "ASCII transliterations of Unicode text" +category = "main" +optional = false +python-versions = 
">=3.5" + +[[package]] +name = "urllib3" +version = "1.26.12" +description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "vine" +version = "5.0.0" +description = "Promises, promises, promises." +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "virtualenv" +version = "20.16.6" +description = "Virtual Python Environment builder" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +distlib = ">=0.3.6,<1" +filelock = ">=3.4.1,<4" +platformdirs = ">=2.4,<3" + +[package.extras] +docs = ["proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-argparse (>=0.3.2)", "sphinx-rtd-theme (>=1)", "towncrier (>=22.8)"] +testing = ["coverage (>=6.2)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=21.3)", "pytest (>=7.0.1)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.6.1)", "pytest-randomly (>=3.10.3)", "pytest-timeout (>=2.1)"] + +[[package]] +name = "wcwidth" +version = "0.2.5" +description = "Measures the displayed width of unicode strings in a terminal" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "werkzeug" +version = "2.2.2" +description = "The comprehensive WSGI web application library." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +MarkupSafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog"] + +[[package]] +name = "wrapt" +version = "1.14.1" +description = "Module for decorators, wrappers and monkey patching." +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" + +[[package]] +name = "wtforms" +version = "3.0.1" +description = "Form validation and rendering for Python web development." 
+category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +MarkupSafe = "*" + +[package.extras] +email = ["email-validator"] + +[[package]] +name = "xdoctest" +version = "1.1.0" +description = "A rewrite of the builtin doctest module" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +colorama = {version = "*", optional = true, markers = "platform_system == \"Windows\" and extra == \"colors\""} +Pygments = {version = "*", optional = true, markers = "python_version >= \"3.5.0\" and extra == \"colors\""} +six = "*" + +[package.extras] +all = ["IPython", "IPython", "Pygments", "Pygments", "attrs", "cmake", "codecov", "colorama", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert", "ninja", "pybind11", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest-cov", "pytest-cov", "pytest-cov", "pytest-cov", "scikit-build", "six", "typing"] +all-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "cmake (==3.21.2)", "codecov (==2.0.15)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "ninja (==1.10.2)", "pybind11 (==2.7.1)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==2.8.1)", "pytest-cov (==2.8.1)", "pytest-cov (==2.9.0)", "pytest-cov (==3.0.0)", "scikit-build (==0.11.1)", "six (==1.11.0)", "typing (==3.7.4)"] +colors = ["Pygments", "Pygments", "colorama"] +jupyter = ["IPython", "IPython", "attrs", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert"] +optional = ["IPython", "IPython", "Pygments", "Pygments", "attrs", "colorama", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert", "pyflakes", "tomli"] +optional-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "pyflakes (==2.2.0)", "tomli (==0.2.0)"] +runtime-strict = ["six (==1.11.0)"] +tests = ["cmake", "codecov", "ninja", "pybind11", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest-cov", "pytest-cov", "pytest-cov", "pytest-cov", "scikit-build", "typing"] +tests-strict = ["cmake (==3.21.2)", "codecov (==2.0.15)", "ninja (==1.10.2)", "pybind11 (==2.7.1)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==2.8.1)", "pytest-cov (==2.8.1)", "pytest-cov (==2.9.0)", "pytest-cov (==3.0.0)", "scikit-build (==0.11.1)", "typing 
(==3.7.4)"] + +[metadata] +lock-version = "1.1" +python-versions = ">=3.11,<3.12" +content-hash = "218d9e84c83ac2b9953fa5e18ee39879d2573fc749900887851be6d9ec32e63d" + +[metadata.files] +alabaster = [ + {file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"}, + {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"}, +] +alembic = [ + {file = "alembic-1.8.1-py3-none-any.whl", hash = "sha256:0a024d7f2de88d738d7395ff866997314c837be6104e90c5724350313dee4da4"}, + {file = "alembic-1.8.1.tar.gz", hash = "sha256:cd0b5e45b14b706426b833f06369b9a6d5ee03f826ec3238723ce8caaf6e5ffa"}, +] +amqp = [ + {file = "amqp-5.1.1-py3-none-any.whl", hash = "sha256:6f0956d2c23d8fa6e7691934d8c3930eadb44972cbbd1a7ae3a520f735d43359"}, + {file = "amqp-5.1.1.tar.gz", hash = "sha256:2c1b13fecc0893e946c65cbd5f36427861cffa4ea2201d8f6fca22e2a373b5e2"}, +] +aniso8601 = [ + {file = "aniso8601-9.0.1-py2.py3-none-any.whl", hash = "sha256:1d2b7ef82963909e93c4f24ce48d4de9e66009a21bf1c1e1c85bdd0812fe412f"}, + {file = "aniso8601-9.0.1.tar.gz", hash = "sha256:72e3117667eedf66951bb2d93f4296a56b94b078a8a95905a052611fb3f1b973"}, +] +apscheduler = [ + {file = "APScheduler-3.10.0-py3-none-any.whl", hash = "sha256:575299f20073c60a2cc9d4fa5906024cdde33c5c0ce6087c4e3c14be3b50fdd4"}, + {file = "APScheduler-3.10.0.tar.gz", hash = "sha256:a49fc23269218416f0e41890eea7a75ed6b284f10630dcfe866ab659621a3696"}, +] +astroid = [ + {file = "astroid-2.12.12-py3-none-any.whl", hash = "sha256:72702205200b2a638358369d90c222d74ebc376787af8fb2f7f2a86f7b5cc85f"}, + {file = "astroid-2.12.12.tar.gz", hash = "sha256:1c00a14f5a3ed0339d38d2e2e5b74ea2591df5861c0936bb292b84ccf3a78d83"}, +] +attrs = [ + {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, + {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, +] +babel = [ + {file = "Babel-2.10.3-py3-none-any.whl", hash = "sha256:ff56f4892c1c4bf0d814575ea23471c230d544203c7748e8c68f0089478d48eb"}, + {file = "Babel-2.10.3.tar.gz", hash = "sha256:7614553711ee97490f732126dc077f8d0ae084ebc6a96e23db1482afabdb2c51"}, +] +bandit = [ + {file = "bandit-1.7.2-py3-none-any.whl", hash = "sha256:e20402cadfd126d85b68ed4c8862959663c8c372dbbb1fca8f8e2c9f55a067ec"}, + {file = "bandit-1.7.2.tar.gz", hash = "sha256:6d11adea0214a43813887bfe71a377b5a9955e4c826c8ffd341b494e3ab25260"}, +] +bcrypt = [ + {file = "bcrypt-4.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0eaa47d4661c326bfc9d08d16debbc4edf78778e6aaba29c1bc7ce67214d4410"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae88eca3024bb34bb3430f964beab71226e761f51b912de5133470b649d82344"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:a522427293d77e1c29e303fc282e2d71864579527a04ddcfda6d4f8396c6c36a"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:fbdaec13c5105f0c4e5c52614d04f0bca5f5af007910daa8b6b12095edaa67b3"}, + {file = 
"bcrypt-4.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ca3204d00d3cb2dfed07f2d74a25f12fc12f73e606fcaa6975d1f7ae69cacbb2"}, + {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:089098effa1bc35dc055366740a067a2fc76987e8ec75349eb9484061c54f535"}, + {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:e9a51bbfe7e9802b5f3508687758b564069ba937748ad7b9e890086290d2f79e"}, + {file = "bcrypt-4.0.1-cp36-abi3-win32.whl", hash = "sha256:2caffdae059e06ac23fce178d31b4a702f2a3264c20bfb5ff541b338194d8fab"}, + {file = "bcrypt-4.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:8a68f4341daf7522fe8d73874de8906f3a339048ba406be6ddc1b3ccb16fc0d9"}, + {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf4fa8b2ca74381bb5442c089350f09a3f17797829d958fad058d6e44d9eb83c"}, + {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:67a97e1c405b24f19d08890e7ae0c4f7ce1e56a712a016746c8b2d7732d65d4b"}, + {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b3b85202d95dd568efcb35b53936c5e3b3600c7cdcc6115ba461df3a8e89f38d"}, + {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbb03eec97496166b704ed663a53680ab57c5084b2fc98ef23291987b525cb7d"}, + {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:5ad4d32a28b80c5fa6671ccfb43676e8c1cc232887759d1cd7b6f56ea4355215"}, + {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b57adba8a1444faf784394de3436233728a1ecaeb6e07e8c22c8848f179b893c"}, + {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:705b2cea8a9ed3d55b4491887ceadb0106acf7c6387699fca771af56b1cdeeda"}, + {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:2b3ac11cf45161628f1f3733263e63194f22664bf4d0c0f3ab34099c02134665"}, + {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3100851841186c25f127731b9fa11909ab7b1df6fc4b9f8353f4f1fd952fbf71"}, + {file = "bcrypt-4.0.1.tar.gz", hash = "sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd"}, +] +beautifulsoup4 = [ + {file = "beautifulsoup4-4.11.1-py3-none-any.whl", hash = "sha256:58d5c3d29f5a36ffeb94f02f0d786cd53014cf9b3b3951d42e0080d8a9498d30"}, + {file = "beautifulsoup4-4.11.1.tar.gz", hash = "sha256:ad9aa55b65ef2808eb405f46cf74df7fcb7044d5cbc26487f96eb2ef2e436693"}, +] +billiard = [ + {file = "billiard-3.6.4.0-py3-none-any.whl", hash = "sha256:87103ea78fa6ab4d5c751c4909bcff74617d985de7fa8b672cf8618afd5a875b"}, + {file = "billiard-3.6.4.0.tar.gz", hash = "sha256:299de5a8da28a783d51b197d496bef4f1595dd023a93a4f59dde1886ae905547"}, +] +black = [ + {file = "black-23.1a1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fb7641d442ede92538bc70fa0201f884753a7d0f62f26c722b7b00301b95902"}, + {file = "black-23.1a1-cp310-cp310-win_amd64.whl", hash = "sha256:88288a645402106b8eb9f50d7340ae741e16240bb01c2eed8466549153daa96e"}, + {file = "black-23.1a1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4db1d8027ce7ae53f0ccf02b0be0b8808fefb291d6cb1543420f4165d96d364c"}, + {file = "black-23.1a1-cp311-cp311-win_amd64.whl", hash = "sha256:88ec25a64063945b4591b6378bead544c5d3260de1c93ad96f3ad2d76ddd76fd"}, + {file = "black-23.1a1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8dff6f0157e47fbbeada046fca144b6557d3be2fb2602d668881cd179f04a352"}, + {file = "black-23.1a1-cp37-cp37m-win_amd64.whl", hash = "sha256:ca658b69260a18bf7aa0b0a6562dbbd304a737487d1318998aaca5a75901fd2c"}, + {file = "black-23.1a1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85dede655442f5e246e7abd667fe07e14916897ba52f3640b5489bf11f7dbf67"}, + {file = "black-23.1a1-cp38-cp38-win_amd64.whl", hash = "sha256:ddbf9da228726d46f45c29024263e160d41030a415097254817d65127012d1a2"}, + {file = "black-23.1a1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63330069d8ec909cf4e2c4d43a7f00aeb03335430ef9fec6cd2328e6ebde8a77"}, + {file = "black-23.1a1-cp39-cp39-win_amd64.whl", hash = "sha256:793c9176beb2adf295f6b863d9a4dc953fe2ac359ca3da108d71d14cb2c09e52"}, + {file = "black-23.1a1-py3-none-any.whl", hash = "sha256:e88e4b633d64b9e7adc4a6b922f52bb204af9f90d7b1e3317e6490f2b598b1ea"}, + {file = "black-23.1a1.tar.gz", hash = "sha256:0b945a5a1e5a5321f884de0061d5a8585d947c9b608e37b6d26ceee4dfdf4b62"}, +] +blinker = [ + {file = "blinker-1.5-py2.py3-none-any.whl", hash = "sha256:1eb563df6fdbc39eeddc177d953203f99f097e9bf0e2b8f9f3cf18b6ca425e36"}, + {file = "blinker-1.5.tar.gz", hash = "sha256:923e5e2f69c155f2cc42dafbbd70e16e3fde24d2d4aa2ab72fbe386238892462"}, +] +celery = [ + {file = "celery-5.2.7-py3-none-any.whl", hash = "sha256:138420c020cd58d6707e6257b6beda91fd39af7afde5d36c6334d175302c0e14"}, + {file = "celery-5.2.7.tar.gz", hash = "sha256:fafbd82934d30f8a004f81e8f7a062e31413a23d444be8ee3326553915958c6d"}, +] +certifi = [ + {file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"}, + {file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"}, +] +cfgv = [ + {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"}, + {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, +] +charset-normalizer = [ + {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, + {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, +] +classify-imports = [ + {file = "classify_imports-4.2.0-py2.py3-none-any.whl", hash = "sha256:dbbc264b70a470ed8c6c95976a11dfb8b7f63df44ed1af87328bbed2663f5161"}, + {file = "classify_imports-4.2.0.tar.gz", hash = "sha256:7abfb7ea92149b29d046bd34573d247ba6e68cc28100c801eba4af17964fc40e"}, +] +click = [ + {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, + {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, +] +click-didyoumean = [ + {file = "click-didyoumean-0.3.0.tar.gz", hash = "sha256:f184f0d851d96b6d29297354ed981b7dd71df7ff500d82fa6d11f0856bee8035"}, + {file = "click_didyoumean-0.3.0-py3-none-any.whl", hash = "sha256:a0713dc7a1de3f06bc0df5a9567ad19ead2d3d5689b434768a6145bff77c0667"}, +] +click-plugins = [ + {file = "click-plugins-1.1.1.tar.gz", hash = "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b"}, + {file = "click_plugins-1.1.1-py2.py3-none-any.whl", hash = "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8"}, +] +click-repl = [ + {file = "click-repl-0.2.0.tar.gz", 
hash = "sha256:cd12f68d745bf6151210790540b4cb064c7b13e571bc64b6957d98d120dacfd8"}, + {file = "click_repl-0.2.0-py3-none-any.whl", hash = "sha256:94b3fbbc9406a236f176e0506524b2937e4b23b6f4c0c0b2a0a83f8a64e9194b"}, +] +clickclick = [ + {file = "clickclick-20.10.2-py2.py3-none-any.whl", hash = "sha256:c8f33e6d9ec83f68416dd2136a7950125bd256ec39ccc9a85c6e280a16be2bb5"}, + {file = "clickclick-20.10.2.tar.gz", hash = "sha256:4efb13e62353e34c5eef7ed6582c4920b418d7dedc86d819e22ee089ba01802c"}, +] +colorama = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] +configparser = [ + {file = "configparser-5.3.0-py3-none-any.whl", hash = "sha256:b065779fd93c6bf4cee42202fa4351b4bb842e96a3fb469440e484517a49b9fa"}, + {file = "configparser-5.3.0.tar.gz", hash = "sha256:8be267824b541c09b08db124917f48ab525a6c3e837011f3130781a224c57090"}, +] +connexion = [ + {file = "connexion-2.14.1-py2.py3-none-any.whl", hash = "sha256:f343717241b4c4802a694c38fee66fb1693c897fe4ea5a957fa9b3b07caf6394"}, + {file = "connexion-2.14.1.tar.gz", hash = "sha256:99aa5781e70a7b94f8ffae8cf89f309d49cdb811bbd65a8e2f2546f3b19a01e6"}, +] +coverage = [ {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, @@ -580,456 +2294,101 @@ files = [ {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, ] - -[package.extras] -toml = ["tomli"] - -[[package]] -name = "darglint" -version = "1.8.1" -description = "A utility for ensuring Google-style docstrings stay up to date with the source code." 
-category = "dev" -optional = false -python-versions = ">=3.6,<4.0" -files = [ +darglint = [ {file = "darglint-1.8.1-py3-none-any.whl", hash = "sha256:5ae11c259c17b0701618a20c3da343a3eb98b3bc4b5a83d31cdd94f5ebdced8d"}, {file = "darglint-1.8.1.tar.gz", hash = "sha256:080d5106df149b199822e7ee7deb9c012b49891538f14a11be681044f0bb20da"}, ] - -[[package]] -name = "distlib" -version = "0.3.6" -description = "Distribution utilities" -category = "dev" -optional = false -python-versions = "*" -files = [ +distlib = [ {file = "distlib-0.3.6-py2.py3-none-any.whl", hash = "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"}, {file = "distlib-0.3.6.tar.gz", hash = "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46"}, ] - -[[package]] -name = "docutils" -version = "0.19" -description = "Docutils -- Python Documentation Utilities" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ +docutils = [ {file = "docutils-0.19-py3-none-any.whl", hash = "sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc"}, {file = "docutils-0.19.tar.gz", hash = "sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6"}, ] - -[[package]] -name = "dparse" -version = "0.6.2" -description = "A parser for Python dependency files" -category = "dev" -optional = false -python-versions = ">=3.5" -files = [ +dparse = [ {file = "dparse-0.6.2-py3-none-any.whl", hash = "sha256:8097076f1dd26c377f30d4745e6ec18fef42f3bf493933b842ac5bafad8c345f"}, {file = "dparse-0.6.2.tar.gz", hash = "sha256:d45255bda21f998bc7ddf2afd5e62505ba6134756ba2d42a84c56b0826614dfe"}, ] - -[package.dependencies] -packaging = "*" -toml = "*" - -[package.extras] -conda = ["pyyaml"] -pipenv = ["pipenv"] - -[[package]] -name = "ecdsa" -version = "0.18.0" -description = "ECDSA cryptographic signature library (pure python)" -category = "main" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ +ecdsa = [ {file = "ecdsa-0.18.0-py2.py3-none-any.whl", hash = "sha256:80600258e7ed2f16b9aa1d7c295bd70194109ad5a30fdee0eaeefef1d4c559dd"}, {file = "ecdsa-0.18.0.tar.gz", hash = "sha256:190348041559e21b22a1d65cee485282ca11a6f81d503fddb84d5017e9ed1e49"}, ] - -[package.dependencies] -six = ">=1.9.0" - -[package.extras] -gmpy = ["gmpy"] -gmpy2 = ["gmpy2"] - -[[package]] -name = "filelock" -version = "3.8.0" -description = "A platform independent file lock." 
-category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +filelock = [ {file = "filelock-3.8.0-py3-none-any.whl", hash = "sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4"}, {file = "filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"}, ] - -[package.extras] -docs = ["furo (>=2022.6.21)", "sphinx (>=5.1.1)", "sphinx-autodoc-typehints (>=1.19.1)"] -testing = ["covdefaults (>=2.2)", "coverage (>=6.4.2)", "pytest (>=7.1.2)", "pytest-cov (>=3)", "pytest-timeout (>=2.1)"] - -[[package]] -name = "flake8" -version = "4.0.1" -description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ +flake8 = [ {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"}, ] - -[package.dependencies] -mccabe = ">=0.6.0,<0.7.0" -pycodestyle = ">=2.8.0,<2.9.0" -pyflakes = ">=2.4.0,<2.5.0" - -[[package]] -name = "flake8-bandit" -version = "2.1.2" -description = "Automated security testing with bandit and flake8." -category = "dev" -optional = false -python-versions = "*" -files = [ +flake8-bandit = [ {file = "flake8_bandit-2.1.2.tar.gz", hash = "sha256:687fc8da2e4a239b206af2e54a90093572a60d0954f3054e23690739b0b0de3b"}, ] - -[package.dependencies] -bandit = "*" -flake8 = "*" -flake8-polyfill = "*" -pycodestyle = "*" - -[[package]] -name = "flake8-bugbear" -version = "22.10.27" -description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." 
-category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +flake8-bugbear = [ {file = "flake8-bugbear-22.10.27.tar.gz", hash = "sha256:a6708608965c9e0de5fff13904fed82e0ba21ac929fe4896459226a797e11cd5"}, {file = "flake8_bugbear-22.10.27-py3-none-any.whl", hash = "sha256:6ad0ab754507319060695e2f2be80e6d8977cfcea082293089a9226276bd825d"}, ] - -[package.dependencies] -attrs = ">=19.2.0" -flake8 = ">=3.0.0" - -[package.extras] -dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "tox"] - -[[package]] -name = "flake8-docstrings" -version = "1.6.0" -description = "Extension for flake8 which uses pydocstyle to check docstrings" -category = "dev" -optional = false -python-versions = "*" -files = [ +flake8-docstrings = [ {file = "flake8-docstrings-1.6.0.tar.gz", hash = "sha256:9fe7c6a306064af8e62a055c2f61e9eb1da55f84bb39caef2b84ce53708ac34b"}, {file = "flake8_docstrings-1.6.0-py2.py3-none-any.whl", hash = "sha256:99cac583d6c7e32dd28bbfbef120a7c0d1b6dde4adb5a9fd441c4227a6534bde"}, ] - -[package.dependencies] -flake8 = ">=3" -pydocstyle = ">=2.1" - -[[package]] -name = "flake8-polyfill" -version = "1.0.2" -description = "Polyfill package for Flake8 plugins" -category = "dev" -optional = false -python-versions = "*" -files = [ +flake8-polyfill = [ {file = "flake8-polyfill-1.0.2.tar.gz", hash = "sha256:e44b087597f6da52ec6393a709e7108b2905317d0c0b744cdca6208e670d8eda"}, {file = "flake8_polyfill-1.0.2-py2.py3-none-any.whl", hash = "sha256:12be6a34ee3ab795b19ca73505e7b55826d5f6ad7230d31b18e106400169b9e9"}, ] - -[package.dependencies] -flake8 = "*" - -[[package]] -name = "flake8-rst-docstrings" -version = "0.2.7" -description = "Python docstring reStructuredText (RST) validator" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +flake8-rst-docstrings = [ {file = "flake8-rst-docstrings-0.2.7.tar.gz", hash = "sha256:2740067ab9237559dd45a3434d8c987792c7b259ca563621a3b95efe201f5382"}, {file = "flake8_rst_docstrings-0.2.7-py3-none-any.whl", hash = "sha256:5d56075dce360bcc9c6775bfe7cb431aa395de600ca7e8d40580a28d50b2a803"}, ] - -[package.dependencies] -flake8 = ">=3.0.0" -pygments = "*" -restructuredtext-lint = "*" - -[[package]] -name = "flask" -version = "2.2.2" -description = "A simple framework for building complex web applications." -category = "main" -optional = false -python-versions = ">=3.7" -files = [ +flask = [ {file = "Flask-2.2.2-py3-none-any.whl", hash = "sha256:b9c46cc36662a7949f34b52d8ec7bb59c0d74ba08ba6cb9ce9adc1d8676d9526"}, {file = "Flask-2.2.2.tar.gz", hash = "sha256:642c450d19c4ad482f96729bd2a8f6d32554aa1e231f4f6b4e7e5264b16cca2b"}, ] - -[package.dependencies] -click = ">=8.0" -itsdangerous = ">=2.0" -Jinja2 = ">=3.0" -Werkzeug = ">=2.2.2" - -[package.extras] -async = ["asgiref (>=3.2)"] -dotenv = ["python-dotenv"] - -[[package]] -name = "flask-admin" -version = "1.6.0" -description = "Simple and extensible admin interface framework for Flask" -category = "main" -optional = false -python-versions = ">=3.6" -files = [ +flask-admin = [ {file = "Flask-Admin-1.6.0.tar.gz", hash = "sha256:424ffc79b7b0dfff051555686ea12e86e48dffacac14beaa319fb4502ac40988"}, ] - -[package.dependencies] -Flask = ">=0.7" -wtforms = "*" - -[package.extras] -aws = ["boto"] -azure = ["azure-storage-blob"] - -[[package]] -name = "flask-bcrypt" -version = "1.0.1" -description = "Brcrypt hashing for Flask." 
-category = "main" -optional = false -python-versions = "*" -files = [ +flask-bcrypt = [ {file = "Flask-Bcrypt-1.0.1.tar.gz", hash = "sha256:f07b66b811417ea64eb188ae6455b0b708a793d966e1a80ceec4a23bc42a4369"}, {file = "Flask_Bcrypt-1.0.1-py3-none-any.whl", hash = "sha256:062fd991dc9118d05ac0583675507b9fe4670e44416c97e0e6819d03d01f808a"}, ] - -[package.dependencies] -bcrypt = ">=3.1.1" -Flask = "*" - -[[package]] -name = "flask-bpmn" -version = "0.0.0" -description = "Flask Bpmn" -category = "main" -optional = false -python-versions = "^3.7" -files = [] -develop = false - -[package.dependencies] -click = "^8.0.1" -flask = "*" -flask-admin = "*" -flask-bcrypt = "*" -flask-cors = "*" -flask-mail = "*" -flask-marshmallow = "*" -flask-migrate = "*" -flask-restful = "*" -greenlet = "^2.0.1" -sentry-sdk = "*" -sphinx-autoapi = "^2.0.0" -spiffworkflow = "*" -werkzeug = "*" - -[package.source] -type = "git" -url = "https://github.com/sartography/flask-bpmn" -reference = "main" -resolved_reference = "c18306300f4312b8d36e0197fd6b62399180d0b1" - -[[package]] -name = "flask-cors" -version = "3.0.10" -description = "A Flask extension adding a decorator for CORS support" -category = "main" -optional = false -python-versions = "*" -files = [ +flask-bpmn = [] +flask-cors = [ {file = "Flask-Cors-3.0.10.tar.gz", hash = "sha256:b60839393f3b84a0f3746f6cdca56c1ad7426aa738b70d6c61375857823181de"}, {file = "Flask_Cors-3.0.10-py2.py3-none-any.whl", hash = "sha256:74efc975af1194fc7891ff5cd85b0f7478be4f7f59fe158102e91abb72bb4438"}, ] - -[package.dependencies] -Flask = ">=0.9" -Six = "*" - -[[package]] -name = "flask-mail" -version = "0.9.1" -description = "Flask extension for sending email" -category = "main" -optional = false -python-versions = "*" -files = [ +flask-mail = [ {file = "Flask-Mail-0.9.1.tar.gz", hash = "sha256:22e5eb9a940bf407bcf30410ecc3708f3c56cc44b29c34e1726fe85006935f41"}, ] - -[package.dependencies] -blinker = "*" -Flask = "*" - -[[package]] -name = "flask-marshmallow" -version = "0.14.0" -description = "Flask + marshmallow for beautiful APIs" -category = "main" -optional = false -python-versions = "*" -files = [ +flask-marshmallow = [ {file = "flask-marshmallow-0.14.0.tar.gz", hash = "sha256:bd01a6372cbe50e36f205cfff0fc5dab0b7b662c4c8b2c4fc06a3151b2950950"}, {file = "flask_marshmallow-0.14.0-py2.py3-none-any.whl", hash = "sha256:2adcd782b5a4a6c5ae3c96701f320d8ca6997995a52b2661093c56cc3ed24754"}, ] - -[package.dependencies] -Flask = "*" -marshmallow = ">=2.0.0" -six = ">=1.9.0" - -[package.extras] -dev = ["flake8 (==3.8.3)", "flake8-bugbear (==20.1.4)", "flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmallow-sqlalchemy (>=0.13.0,<0.19.0)", "mock", "pre-commit (>=2.4,<3.0)", "pytest", "tox"] -docs = ["Sphinx (==3.2.1)", "marshmallow-sqlalchemy (>=0.13.0)", "sphinx-issues (==1.2.0)"] -lint = ["flake8 (==3.8.3)", "flake8-bugbear (==20.1.4)", "pre-commit (>=2.4,<3.0)"] -sqlalchemy = ["flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmallow-sqlalchemy (>=0.13.0,<0.19.0)"] -tests = ["flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmallow-sqlalchemy (>=0.13.0,<0.19.0)", "mock", "pytest"] - -[[package]] -name = "flask-migrate" -version = "3.1.0" -description = "SQLAlchemy database migrations for Flask applications using Alembic." 
-category = "main" -optional = false -python-versions = ">=3.6" -files = [ +flask-migrate = [ {file = "Flask-Migrate-3.1.0.tar.gz", hash = "sha256:57d6060839e3a7f150eaab6fe4e726d9e3e7cffe2150fb223d73f92421c6d1d9"}, {file = "Flask_Migrate-3.1.0-py3-none-any.whl", hash = "sha256:a6498706241aba6be7a251078de9cf166d74307bca41a4ca3e403c9d39e2f897"}, ] - -[package.dependencies] -alembic = ">=0.7" -Flask = ">=0.9" -Flask-SQLAlchemy = ">=1.0" - -[[package]] -name = "flask-restful" -version = "0.3.9" -description = "Simple framework for creating REST APIs" -category = "main" -optional = false -python-versions = "*" -files = [ +flask-restful = [ {file = "Flask-RESTful-0.3.9.tar.gz", hash = "sha256:ccec650b835d48192138c85329ae03735e6ced58e9b2d9c2146d6c84c06fa53e"}, {file = "Flask_RESTful-0.3.9-py2.py3-none-any.whl", hash = "sha256:4970c49b6488e46c520b325f54833374dc2b98e211f1b272bd4b0c516232afe2"}, ] - -[package.dependencies] -aniso8601 = ">=0.82" -Flask = ">=0.8" -pytz = "*" -six = ">=1.3.0" - -[package.extras] -docs = ["sphinx"] - -[[package]] -name = "flask-sqlalchemy" -version = "3.0.2" -description = "Add SQLAlchemy support to your Flask application." -category = "main" -optional = false -python-versions = ">=3.7" -files = [ +flask-sqlalchemy = [ {file = "Flask-SQLAlchemy-3.0.2.tar.gz", hash = "sha256:16199f5b3ddfb69e0df2f52ae4c76aedbfec823462349dabb21a1b2e0a2b65e9"}, {file = "Flask_SQLAlchemy-3.0.2-py3-none-any.whl", hash = "sha256:7d0cd9cf73e64a996bb881a1ebd01633fc5a6d11c36ea27f7b5e251dc45476e7"}, ] - -[package.dependencies] -Flask = ">=2.2" -SQLAlchemy = ">=1.4.18" - -[[package]] -name = "furo" -version = "2022.9.29" -description = "A clean customisable Sphinx documentation theme." -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +furo = [ {file = "furo-2022.9.29-py3-none-any.whl", hash = "sha256:559ee17999c0f52728481dcf6b1b0cf8c9743e68c5e3a18cb45a7992747869a9"}, {file = "furo-2022.9.29.tar.gz", hash = "sha256:d4238145629c623609c2deb5384f8d036e2a1ee2a101d64b67b4348112470dbd"}, ] - -[package.dependencies] -beautifulsoup4 = "*" -pygments = ">=2.7" -sphinx = ">=4.0,<6.0" -sphinx-basic-ng = "*" - -[[package]] -name = "gitdb" -version = "4.0.9" -description = "Git Object Database" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ +gitdb = [ {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, ] - -[package.dependencies] -smmap = ">=3.0.1,<6" - -[[package]] -name = "gitpython" -version = "3.1.29" -description = "GitPython is a python library used to interact with Git repositories" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +gitpython = [ {file = "GitPython-3.1.29-py3-none-any.whl", hash = "sha256:41eea0deec2deea139b459ac03656f0dd28fc4a3387240ec1d3c259a2c47850f"}, {file = "GitPython-3.1.29.tar.gz", hash = "sha256:cc36bfc4a3f913e66805a28e84703e419d9c264c1077e537b54f0e1af85dbefd"}, ] - -[package.dependencies] -gitdb = ">=4.0.1,<5" - -[[package]] -name = "greenlet" -version = "2.0.1" -description = "Lightweight in-process concurrent programming" -category = "main" -optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" -files = [ +greenlet = [ {file = "greenlet-2.0.1-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:9ed358312e63bf683b9ef22c8e442ef6c5c02973f0c2a939ec1d7b50c974015c"}, {file = 
"greenlet-2.0.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4f09b0010e55bec3239278f642a8a506b91034f03a4fb28289a7d448a67f1515"}, {file = "greenlet-2.0.1-cp27-cp27m-win32.whl", hash = "sha256:1407fe45246632d0ffb7a3f4a520ba4e6051fc2cbd61ba1f806900c27f47706a"}, @@ -1091,185 +2450,47 @@ files = [ {file = "greenlet-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b23d2a46d53210b498e5b701a1913697671988f4bf8e10f935433f6e7c332fb6"}, {file = "greenlet-2.0.1.tar.gz", hash = "sha256:42e602564460da0e8ee67cb6d7236363ee5e131aa15943b6670e44e5c2ed0f67"}, ] - -[package.extras] -docs = ["Sphinx", "docutils (<0.18)"] -test = ["faulthandler", "objgraph", "psutil"] - -[[package]] -name = "gunicorn" -version = "20.1.0" -description = "WSGI HTTP Server for UNIX" -category = "main" -optional = false -python-versions = ">=3.5" -files = [ +gunicorn = [ {file = "gunicorn-20.1.0-py3-none-any.whl", hash = "sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e"}, {file = "gunicorn-20.1.0.tar.gz", hash = "sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8"}, ] - -[package.dependencies] -setuptools = ">=3.0" - -[package.extras] -eventlet = ["eventlet (>=0.24.1)"] -gevent = ["gevent (>=1.4.0)"] -setproctitle = ["setproctitle"] -tornado = ["tornado (>=0.2)"] - -[[package]] -name = "identify" -version = "2.5.7" -description = "File identification library for Python" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +identify = [ {file = "identify-2.5.7-py2.py3-none-any.whl", hash = "sha256:7a67b2a6208d390fd86fd04fb3def94a3a8b7f0bcbd1d1fcd6736f4defe26390"}, {file = "identify-2.5.7.tar.gz", hash = "sha256:5b8fd1e843a6d4bf10685dd31f4520a7f1c7d0e14e9bc5d34b1d6f111cabc011"}, ] - -[package.extras] -license = ["ukkonen"] - -[[package]] -name = "idna" -version = "3.4" -description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" -optional = false -python-versions = ">=3.5" -files = [ +idna = [ {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, ] - -[[package]] -name = "imagesize" -version = "1.4.1" -description = "Getting image size from png/jpeg/jpeg2000/gif file" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ +imagesize = [ {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, ] - -[[package]] -name = "inflection" -version = "0.5.1" -description = "A port of Ruby on Rails inflector to Python" -category = "main" -optional = false -python-versions = ">=3.5" -files = [ +inflection = [ {file = "inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2"}, {file = "inflection-0.5.1.tar.gz", hash = "sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417"}, ] - -[[package]] -name = "iniconfig" -version = "1.1.1" -description = "iniconfig: brain-dead simple config-ini parsing" -category = "main" -optional = false -python-versions = "*" -files = [ +iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = "iniconfig-1.1.1.tar.gz", hash = 
"sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, ] - -[[package]] -name = "itsdangerous" -version = "2.1.2" -description = "Safely pass data to untrusted environments and back." -category = "main" -optional = false -python-versions = ">=3.7" -files = [ +itsdangerous = [ {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"}, {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"}, ] - -[[package]] -name = "jinja2" -version = "3.1.2" -description = "A very fast and expressive template engine." -category = "main" -optional = false -python-versions = ">=3.7" -files = [ +jinja2 = [ {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, ] - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - -[[package]] -name = "jsonschema" -version = "4.16.0" -description = "An implementation of JSON Schema validation for Python" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ +jsonschema = [ {file = "jsonschema-4.16.0-py3-none-any.whl", hash = "sha256:9e74b8f9738d6a946d70705dc692b74b5429cd0960d58e79ffecfc43b2221eb9"}, {file = "jsonschema-4.16.0.tar.gz", hash = "sha256:165059f076eff6971bae5b742fc029a7b4ef3f9bcf04c14e4776a7605de14b23"}, ] - -[package.dependencies] -attrs = ">=17.4.0" -pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" - -[package.extras] -format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] -format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] - -[[package]] -name = "kombu" -version = "5.2.4" -description = "Messaging library for Python." -category = "main" -optional = false -python-versions = ">=3.7" -files = [ +kombu = [ {file = "kombu-5.2.4-py3-none-any.whl", hash = "sha256:8b213b24293d3417bcf0d2f5537b7f756079e3ea232a8386dcc89a59fd2361a4"}, {file = "kombu-5.2.4.tar.gz", hash = "sha256:37cee3ee725f94ea8bb173eaab7c1760203ea53bbebae226328600f9d2799610"}, ] - -[package.dependencies] -amqp = ">=5.0.9,<6.0.0" -vine = "*" - -[package.extras] -azureservicebus = ["azure-servicebus (>=7.0.0)"] -azurestoragequeues = ["azure-storage-queue"] -consul = ["python-consul (>=0.6.0)"] -librabbitmq = ["librabbitmq (>=2.0.0)"] -mongodb = ["pymongo (>=3.3.0,<3.12.1)"] -msgpack = ["msgpack"] -pyro = ["pyro4"] -qpid = ["qpid-python (>=0.26)", "qpid-tools (>=0.26)"] -redis = ["redis (>=3.4.1,!=4.0.0,!=4.0.1)"] -slmq = ["softlayer-messaging (>=1.0.3)"] -sqlalchemy = ["sqlalchemy"] -sqs = ["boto3 (>=1.9.12)", "pycurl (>=7.44.1,<7.45.0)", "urllib3 (>=1.26.7)"] -yaml = ["PyYAML (>=3.10)"] -zookeeper = ["kazoo (>=1.3.1)"] - -[[package]] -name = "lazy-object-proxy" -version = "1.8.0" -description = "A fast and thorough lazy object proxy." 
-category = "main" -optional = false -python-versions = ">=3.7" -files = [ +lazy-object-proxy = [ {file = "lazy-object-proxy-1.8.0.tar.gz", hash = "sha256:c219a00245af0f6fa4e95901ed28044544f50152840c5b6a3e7b2568db34d156"}, {file = "lazy_object_proxy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4fd031589121ad46e293629b39604031d354043bb5cdf83da4e93c2d7f3389fe"}, {file = "lazy_object_proxy-1.8.0-cp310-cp310-win32.whl", hash = "sha256:b70d6e7a332eb0217e7872a73926ad4fdc14f846e85ad6749ad111084e76df25"}, @@ -1290,31 +2511,11 @@ files = [ {file = "lazy_object_proxy-1.8.0-pp38-pypy38_pp73-any.whl", hash = "sha256:7e1561626c49cb394268edd00501b289053a652ed762c58e1081224c8d881cec"}, {file = "lazy_object_proxy-1.8.0-pp39-pypy39_pp73-any.whl", hash = "sha256:ce58b2b3734c73e68f0e30e4e725264d4d6be95818ec0a0be4bb6bf9a7e79aa8"}, ] - -[[package]] -name = "livereload" -version = "2.6.3" -description = "Python LiveReload is an awesome tool for web developers" -category = "dev" -optional = false -python-versions = "*" -files = [ +livereload = [ {file = "livereload-2.6.3-py2.py3-none-any.whl", hash = "sha256:ad4ac6f53b2d62bb6ce1a5e6e96f1f00976a32348afedcb4b6d68df2a1d346e4"}, {file = "livereload-2.6.3.tar.gz", hash = "sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869"}, ] - -[package.dependencies] -six = "*" -tornado = {version = "*", markers = "python_version > \"2.7\""} - -[[package]] -name = "lxml" -version = "4.9.1" -description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" -files = [ +lxml = [ {file = "lxml-4.9.1-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:98cafc618614d72b02185ac583c6f7796202062c41d2eeecdf07820bad3295ed"}, {file = "lxml-4.9.1-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c62e8dd9754b7debda0c5ba59d34509c4688f853588d75b53c3791983faa96fc"}, {file = "lxml-4.9.1-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:21fb3d24ab430fc538a96e9fbb9b150029914805d551deeac7d7822f64631dfc"}, @@ -1386,41 +2587,11 @@ files = [ {file = "lxml-4.9.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:287605bede6bd36e930577c5925fcea17cb30453d96a7b4c63c14a257118dbb9"}, {file = "lxml-4.9.1.tar.gz", hash = "sha256:fe749b052bb7233fe5d072fcb549221a8cb1a16725c47c37e42b0b9cb3ff2c3f"}, ] - -[package.extras] -cssselect = ["cssselect (>=0.7)"] -html5 = ["html5lib"] -htmlsoup = ["BeautifulSoup4"] -source = ["Cython (>=0.29.7)"] - -[[package]] -name = "mako" -version = "1.2.3" -description = "A super-fast templating language that borrows the best ideas from the existing templating languages." -category = "main" -optional = false -python-versions = ">=3.7" -files = [ +mako = [ {file = "Mako-1.2.3-py3-none-any.whl", hash = "sha256:c413a086e38cd885088d5e165305ee8eed04e8b3f8f62df343480da0a385735f"}, {file = "Mako-1.2.3.tar.gz", hash = "sha256:7fde96466fcfeedb0eed94f187f20b23d85e4cb41444be0e542e2c8c65c396cd"}, ] - -[package.dependencies] -MarkupSafe = ">=0.9.2" - -[package.extras] -babel = ["Babel"] -lingua = ["lingua"] -testing = ["pytest"] - -[[package]] -name = "markupsafe" -version = "2.1.1" -description = "Safely add untrusted strings to HTML/XML markup." 
-category = "main" -optional = false -python-versions = ">=3.7" -files = [ +markupsafe = [ {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, @@ -1462,86 +2633,23 @@ files = [ {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, ] - -[[package]] -name = "marshmallow" -version = "3.18.0" -description = "A lightweight library for converting complex datatypes to and from native Python datatypes." -category = "main" -optional = false -python-versions = ">=3.7" -files = [ +marshmallow = [ {file = "marshmallow-3.18.0-py3-none-any.whl", hash = "sha256:35e02a3a06899c9119b785c12a22f4cda361745d66a71ab691fd7610202ae104"}, {file = "marshmallow-3.18.0.tar.gz", hash = "sha256:6804c16114f7fce1f5b4dadc31f4674af23317fcc7f075da21e35c1a35d781f7"}, ] - -[package.dependencies] -packaging = ">=17.0" - -[package.extras] -dev = ["flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "mypy (==0.971)", "pre-commit (>=2.4,<3.0)", "pytest", "pytz", "simplejson", "tox"] -docs = ["alabaster (==0.7.12)", "autodocsumm (==0.2.9)", "sphinx (==5.1.1)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"] -lint = ["flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "mypy (==0.971)", "pre-commit (>=2.4,<3.0)"] -tests = ["pytest", "pytz", "simplejson"] - -[[package]] -name = "marshmallow-enum" -version = "1.5.1" -description = "Enum field for Marshmallow" -category = "main" -optional = false -python-versions = "*" -files = [ +marshmallow-enum = [ {file = "marshmallow-enum-1.5.1.tar.gz", hash = "sha256:38e697e11f45a8e64b4a1e664000897c659b60aa57bfa18d44e226a9920b6e58"}, {file = "marshmallow_enum-1.5.1-py2.py3-none-any.whl", hash = "sha256:57161ab3dbfde4f57adeb12090f39592e992b9c86d206d02f6bd03ebec60f072"}, ] - -[package.dependencies] -marshmallow = ">=2.0.0" - -[[package]] -name = "marshmallow-sqlalchemy" -version = "0.28.1" -description = "SQLAlchemy integration with the marshmallow (de)serialization library" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ +marshmallow-sqlalchemy = [ {file = "marshmallow-sqlalchemy-0.28.1.tar.gz", hash = "sha256:aa376747296780a56355e3067b9c8bf43a2a1c44ff985de82b3a5d9e161ca2b8"}, {file = "marshmallow_sqlalchemy-0.28.1-py2.py3-none-any.whl", hash = "sha256:dbb061c19375eca3a7d18358d2ca8bbaee825fc3000a3f114e2698282362b536"}, ] - -[package.dependencies] -marshmallow = ">=3.0.0" -packaging = ">=21.3" -SQLAlchemy = ">=1.3.0" - -[package.extras] -dev = ["flake8 (==4.0.1)", "flake8-bugbear (==22.7.1)", "pre-commit (>=2.0,<3.0)", "pytest", "pytest-lazy-fixture (>=0.6.2)", "tox"] -docs = ["alabaster (==0.7.12)", "sphinx (==4.4.0)", "sphinx-issues (==3.0.1)"] -lint = ["flake8 (==4.0.1)", "flake8-bugbear (==22.7.1)", "pre-commit (>=2.0,<3.0)"] -tests = ["pytest", "pytest-lazy-fixture (>=0.6.2)"] - -[[package]] -name = "mccabe" -version = "0.6.1" -description = "McCabe checker, plugin for flake8" -category = "dev" -optional = false -python-versions = "*" -files = [ +mccabe = 
[ {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, ] - -[[package]] -name = "mypy" -version = "0.982" -description = "Optional static typing for Python" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ +mypy = [ {file = "mypy-0.982-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5085e6f442003fa915aeb0a46d4da58128da69325d8213b4b35cc7054090aed5"}, {file = "mypy-0.982-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:41fd1cf9bc0e1c19b9af13a6580ccb66c381a5ee2cf63ee5ebab747a4badeba3"}, {file = "mypy-0.982-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f793e3dd95e166b66d50e7b63e69e58e88643d80a3dcc3bcd81368e0478b089c"}, @@ -1567,36 +2675,11 @@ files = [ {file = "mypy-0.982-py3-none-any.whl", hash = "sha256:1021c241e8b6e1ca5a47e4d52601274ac078a89845cfde66c6d5f769819ffa1d"}, {file = "mypy-0.982.tar.gz", hash = "sha256:85f7a343542dc8b1ed0a888cdd34dca56462654ef23aa673907305b260b3d746"}, ] - -[package.dependencies] -mypy-extensions = ">=0.4.3" -typing-extensions = ">=3.10" - -[package.extras] -dmypy = ["psutil (>=4.0)"] -python2 = ["typed-ast (>=1.4.0,<2)"] -reports = ["lxml"] - -[[package]] -name = "mypy-extensions" -version = "0.4.3" -description = "Experimental type system extensions for programs checked with the mypy typechecker." -category = "main" -optional = false -python-versions = "*" -files = [ +mypy-extensions = [ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, ] - -[[package]] -name = "mysql-connector-python" -version = "8.0.31" -description = "MySQL driver written in Python" -category = "main" -optional = false -python-versions = "*" -files = [ +mysql-connector-python = [ {file = "mysql-connector-python-8.0.31.tar.gz", hash = "sha256:0fbe8f5441ad781b4f65c54a10ac77c6a329591456607e042786528599519636"}, {file = "mysql_connector_python-8.0.31-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3e271d8de00d5e9f9bd4b212c8e23d2986dead0f20379010f3b274a3e24cbfcb"}, {file = "mysql_connector_python-8.0.31-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:f3ee04a601f9cb90ace9618bbe2fa8e5bb59be3eb0c2bd8a5405fe69e05e446b"}, @@ -1624,174 +2707,47 @@ files = [ {file = "mysql_connector_python-8.0.31-cp39-cp39-win_amd64.whl", hash = "sha256:b2bbf443f6346e46c26a3e91dd96a428a1038f2d3c5e466541078479c64a1833"}, {file = "mysql_connector_python-8.0.31-py2.py3-none-any.whl", hash = "sha256:9be9c4dcae987a2a3f07b2ad984984c24f90887dbfab3c8a971e631ad4ca5ccf"}, ] - -[package.dependencies] -protobuf = ">=3.11.0,<=3.20.1" - -[package.extras] -compression = ["lz4 (>=2.1.6,<=3.1.3)", "zstandard (>=0.12.0,<=0.15.2)"] -dns-srv = ["dnspython (>=1.16.0,<=2.1.0)"] -gssapi = ["gssapi (>=1.6.9,<=1.8.1)"] - -[[package]] -name = "nodeenv" -version = "1.7.0" -description = "Node.js virtual environment builder" -category = "dev" -optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" -files = [ +nodeenv = [ {file = "nodeenv-1.7.0-py2.py3-none-any.whl", hash = "sha256:27083a7b96a25f2f5e1d8cb4b6317ee8aeda3bdd121394e5ac54e498028a042e"}, {file = "nodeenv-1.7.0.tar.gz", hash = "sha256:e0e7f7dfb85fc5394c6fe1e8fa98131a2473e04311a45afb6508f7cf1836fa2b"}, ] - 
-[package.dependencies] -setuptools = "*" - -[[package]] -name = "packaging" -version = "21.3" -description = "Core utilities for Python packages" -category = "main" -optional = false -python-versions = ">=3.6" -files = [ +packaging = [ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, ] - -[package.dependencies] -pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" - -[[package]] -name = "pathspec" -version = "0.10.1" -description = "Utility library for gitignore style pattern matching of file paths." -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +pathspec = [ {file = "pathspec-0.10.1-py3-none-any.whl", hash = "sha256:46846318467efc4556ccfd27816e004270a9eeeeb4d062ce5e6fc7a87c573f93"}, {file = "pathspec-0.10.1.tar.gz", hash = "sha256:7ace6161b621d31e7902eb6b5ae148d12cfd23f4a249b9ffb6b9fee12084323d"}, ] - -[[package]] -name = "pbr" -version = "5.11.0" -description = "Python Build Reasonableness" -category = "dev" -optional = false -python-versions = ">=2.6" -files = [ +pbr = [ {file = "pbr-5.11.0-py2.py3-none-any.whl", hash = "sha256:db2317ff07c84c4c63648c9064a79fe9d9f5c7ce85a9099d4b6258b3db83225a"}, {file = "pbr-5.11.0.tar.gz", hash = "sha256:b97bc6695b2aff02144133c2e7399d5885223d42b7912ffaec2ca3898e673bfe"}, ] - -[[package]] -name = "pep8-naming" -version = "0.13.2" -description = "Check PEP-8 naming conventions, plugin for flake8" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +pep8-naming = [ {file = "pep8-naming-0.13.2.tar.gz", hash = "sha256:93eef62f525fd12a6f8c98f4dcc17fa70baae2f37fa1f73bec00e3e44392fa48"}, {file = "pep8_naming-0.13.2-py3-none-any.whl", hash = "sha256:59e29e55c478db69cffbe14ab24b5bd2cd615c0413edf790d47d3fb7ba9a4e23"}, ] - -[package.dependencies] -flake8 = ">=3.9.1" - -[[package]] -name = "platformdirs" -version = "2.5.2" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +platformdirs = [ {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, ] - -[package.extras] -docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx (>=4)", "sphinx-autodoc-typehints (>=1.12)"] -test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] - -[[package]] -name = "pluggy" -version = "1.0.0" -description = "plugin and hook calling mechanisms for python" -category = "main" -optional = false -python-versions = ">=3.6" -files = [ +pluggy = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, ] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "pre-commit" -version = "2.20.0" -description = "A framework for managing and maintaining multi-language pre-commit hooks." 
-category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +pre-commit = [ {file = "pre_commit-2.20.0-py2.py3-none-any.whl", hash = "sha256:51a5ba7c480ae8072ecdb6933df22d2f812dc897d5fe848778116129a681aac7"}, {file = "pre_commit-2.20.0.tar.gz", hash = "sha256:a978dac7bc9ec0bcee55c18a277d553b0f419d259dadb4b9418ff2d00eb43959"}, ] - -[package.dependencies] -cfgv = ">=2.0.0" -identify = ">=1.0.0" -nodeenv = ">=0.11.1" -pyyaml = ">=5.1" -toml = "*" -virtualenv = ">=20.0.8" - -[[package]] -name = "pre-commit-hooks" -version = "4.3.0" -description = "Some out-of-the-box hooks for pre-commit." -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +pre-commit-hooks = [ {file = "pre_commit_hooks-4.3.0-py2.py3-none-any.whl", hash = "sha256:9ccaf7c98794659d345080ee1ea0256a55ae059675045eebdbbc17c0be8c7e4b"}, {file = "pre_commit_hooks-4.3.0.tar.gz", hash = "sha256:fda598a4c834d030727e6a615722718b47510f4bed72df4c949f95ba9f5aaf88"}, ] - -[package.dependencies] -"ruamel.yaml" = ">=0.15" - -[[package]] -name = "prompt-toolkit" -version = "3.0.31" -description = "Library for building powerful interactive command lines in Python" -category = "main" -optional = false -python-versions = ">=3.6.2" -files = [ +prompt-toolkit = [ {file = "prompt_toolkit-3.0.31-py3-none-any.whl", hash = "sha256:9696f386133df0fc8ca5af4895afe5d78f5fcfe5258111c2a79a1c3e41ffa96d"}, {file = "prompt_toolkit-3.0.31.tar.gz", hash = "sha256:9ada952c9d1787f52ff6d5f3484d0b4df8952787c087edf6a1f7c2cb1ea88148"}, ] - -[package.dependencies] -wcwidth = "*" - -[[package]] -name = "protobuf" -version = "3.20.1" -description = "Protocol Buffers" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ +protobuf = [ {file = "protobuf-3.20.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3cc797c9d15d7689ed507b165cd05913acb992d78b379f6014e013f9ecb20996"}, {file = "protobuf-3.20.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:ff8d8fa42675249bb456f5db06c00de6c2f4c27a065955917b28c4f15978b9c3"}, {file = "protobuf-3.20.1-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cd68be2559e2a3b84f517fb029ee611546f7812b1fdd0aa2ecc9bc6ec0e4fdde"}, @@ -1817,15 +2773,7 @@ files = [ {file = "protobuf-3.20.1-py2.py3-none-any.whl", hash = "sha256:adfc6cf69c7f8c50fd24c793964eef18f0ac321315439d94945820612849c388"}, {file = "protobuf-3.20.1.tar.gz", hash = "sha256:adc31566d027f45efe3f44eeb5b1f329da43891634d61c75a5944e9be6dd42c9"}, ] - -[[package]] -name = "psycopg2" -version = "2.9.5" -description = "psycopg2 - Python-PostgreSQL Database Adapter" -category = "main" -optional = false -python-versions = ">=3.6" -files = [ +psycopg2 = [ {file = "psycopg2-2.9.5-cp310-cp310-win32.whl", hash = "sha256:d3ef67e630b0de0779c42912fe2cbae3805ebaba30cda27fea2a3de650a9414f"}, {file = "psycopg2-2.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:4cb9936316d88bfab614666eb9e32995e794ed0f8f6b3b718666c22819c1d7ee"}, {file = "psycopg2-2.9.5-cp311-cp311-win32.whl", hash = "sha256:093e3894d2d3c592ab0945d9eba9d139c139664dcf83a1c440b8a7aa9bb21955"}, @@ -1840,117 +2788,35 @@ files = [ {file = "psycopg2-2.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:190d51e8c1b25a47484e52a79638a8182451d6f6dff99f26ad9bd81e5359a0fa"}, {file = "psycopg2-2.9.5.tar.gz", hash = "sha256:a5246d2e683a972e2187a8714b5c2cf8156c064629f9a9b1a873c1730d9e245a"}, ] - -[[package]] -name = "pyasn1" -version = "0.4.8" -description = "ASN.1 types and codecs" -category = "main" -optional = false -python-versions = "*" -files = [ +pyasn1 = [ {file = 
"pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"}, {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"}, ] - -[[package]] -name = "pycodestyle" -version = "2.8.0" -description = "Python style guide checker" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ +pycodestyle = [ {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, ] - -[[package]] -name = "pydocstyle" -version = "6.1.1" -description = "Python docstring style checker" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ +pydocstyle = [ {file = "pydocstyle-6.1.1-py3-none-any.whl", hash = "sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"}, {file = "pydocstyle-6.1.1.tar.gz", hash = "sha256:1d41b7c459ba0ee6c345f2eb9ae827cab14a7533a88c5c6f7e94923f72df92dc"}, ] - -[package.dependencies] -snowballstemmer = "*" - -[package.extras] -toml = ["toml"] - -[[package]] -name = "pyflakes" -version = "2.4.0" -description = "passive checker of Python programs" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ +pyflakes = [ {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, ] - -[[package]] -name = "pygments" -version = "2.13.0" -description = "Pygments is a syntax highlighting package written in Python." 
-category = "main" -optional = false -python-versions = ">=3.6" -files = [ +pygments = [ {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, ] - -[package.extras] -plugins = ["importlib-metadata"] - -[[package]] -name = "pyjwt" -version = "2.6.0" -description = "JSON Web Token implementation in Python" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ +pyjwt = [ {file = "PyJWT-2.6.0-py3-none-any.whl", hash = "sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14"}, {file = "PyJWT-2.6.0.tar.gz", hash = "sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd"}, ] - -[package.extras] -crypto = ["cryptography (>=3.4.0)"] -dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] - -[[package]] -name = "pyparsing" -version = "3.0.9" -description = "pyparsing module - Classes and methods to define and execute parsing grammars" -category = "main" -optional = false -python-versions = ">=3.6.8" -files = [ +pyparsing = [ {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, ] - -[package.extras] -diagrams = ["jinja2", "railroad-diagrams"] - -[[package]] -name = "pyrsistent" -version = "0.18.1" -description = "Persistent/Functional/Immutable data structures" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ +pyrsistent = [ {file = "pyrsistent-0.18.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1"}, {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26"}, {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ed6784ceac462a7d6fcb7e9b663e93b9a6fb373b7f43594f9ff68875788e01e"}, @@ -1973,182 +2839,43 @@ files = [ {file = "pyrsistent-0.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:e24a828f57e0c337c8d8bb9f6b12f09dfdf0273da25fda9e314f0b684b415a07"}, {file = "pyrsistent-0.18.1.tar.gz", hash = "sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96"}, ] - -[[package]] -name = "pytest" -version = "7.2.0" -description = "pytest: simple powerful testing with Python" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ +pytest = [ {file = "pytest-7.2.0-py3-none-any.whl", hash = "sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71"}, {file = "pytest-7.2.0.tar.gz", hash = "sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59"}, ] - -[package.dependencies] -attrs = ">=19.2.0" -colorama = {version = "*", markers = "sys_platform == \"win32\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" - -[package.extras] -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] - -[[package]] -name = "pytest-flask" -version = 
"1.2.0" -description = "A set of py.test fixtures to test Flask applications." -category = "main" -optional = false -python-versions = ">=3.5" -files = [ +pytest-flask = [ {file = "pytest-flask-1.2.0.tar.gz", hash = "sha256:46fde652f77777bf02dc91205aec4ce20cdf2acbbbd66a918ab91f5c14693d3d"}, {file = "pytest_flask-1.2.0-py3-none-any.whl", hash = "sha256:fe25b39ad0db09c3d1fe728edecf97ced85e774c775db259a6d25f0270a4e7c9"}, ] - -[package.dependencies] -Flask = "*" -pytest = ">=5.2" -Werkzeug = ">=0.7" - -[package.extras] -docs = ["Sphinx", "sphinx-rtd-theme"] - -[[package]] -name = "pytest-flask-sqlalchemy" -version = "1.1.0" -description = "A pytest plugin for preserving test isolation in Flask-SQlAlchemy using database transactions." -category = "main" -optional = false -python-versions = "*" -files = [ +pytest-flask-sqlalchemy = [ {file = "pytest-flask-sqlalchemy-1.1.0.tar.gz", hash = "sha256:db71a57b90435e5d854b21c37a2584056d6fc3ddb28c09d8d0a2546bd6e390ff"}, {file = "pytest_flask_sqlalchemy-1.1.0-py3-none-any.whl", hash = "sha256:b9f272d5c4092fcbe4a6284e402a37cad84f5b9be3c0bbe1a11927f24c99ff83"}, ] - -[package.dependencies] -Flask-SQLAlchemy = ">=2.3" -packaging = ">=14.1" -pytest = ">=3.2.1" -pytest-mock = ">=1.6.2" -SQLAlchemy = ">=1.2.2" - -[package.extras] -tests = ["psycopg2-binary", "pytest (>=6.0.1)", "pytest-postgresql (>=2.4.0,<4.0.0)"] - -[[package]] -name = "pytest-mock" -version = "3.10.0" -description = "Thin-wrapper around the mock package for easier use with pytest" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ +pytest-mock = [ {file = "pytest-mock-3.10.0.tar.gz", hash = "sha256:fbbdb085ef7c252a326fd8cdcac0aa3b1333d8811f131bdcc701002e1be7ed4f"}, {file = "pytest_mock-3.10.0-py3-none-any.whl", hash = "sha256:f4c973eeae0282963eb293eb173ce91b091a79c1334455acfac9ddee8a1c784b"}, ] - -[package.dependencies] -pytest = ">=5.0" - -[package.extras] -dev = ["pre-commit", "pytest-asyncio", "tox"] - -[[package]] -name = "python-jose" -version = "3.3.0" -description = "JOSE implementation in Python" -category = "main" -optional = false -python-versions = "*" -files = [ +python-jose = [ {file = "python-jose-3.3.0.tar.gz", hash = "sha256:55779b5e6ad599c6336191246e95eb2293a9ddebd555f796a65f838f07e5d78a"}, {file = "python_jose-3.3.0-py2.py3-none-any.whl", hash = "sha256:9b1376b023f8b298536eedd47ae1089bcdb848f1535ab30555cd92002d78923a"}, ] - -[package.dependencies] -ecdsa = "!=0.15" -pyasn1 = "*" -rsa = "*" - -[package.extras] -cryptography = ["cryptography (>=3.4.0)"] -pycrypto = ["pyasn1", "pycrypto (>=2.6.0,<2.7.0)"] -pycryptodome = ["pyasn1", "pycryptodome (>=3.3.1,<4.0.0)"] - -[[package]] -name = "python-keycloak" -version = "2.6.0" -description = "python-keycloak is a Python package providing access to the Keycloak API." 
-category = "main" -optional = false -python-versions = ">=3.7,<4.0" -files = [ +python-keycloak = [ {file = "python-keycloak-2.6.0.tar.gz", hash = "sha256:08c530ff86f631faccb8033d9d9345cc3148cb2cf132ff7564f025292e4dbd96"}, {file = "python_keycloak-2.6.0-py3-none-any.whl", hash = "sha256:a1ce102b978beb56d385319b3ca20992b915c2c12d15a2d0c23f1104882f3fb6"}, ] - -[package.dependencies] -python-jose = ">=3.3.0,<4.0.0" -requests = ">=2.20.0,<3.0.0" -requests-toolbelt = ">=0.9.1,<0.10.0" -urllib3 = ">=1.26.0,<2.0.0" - -[package.extras] -docs = ["Sphinx (>=5.0.2,<6.0.0)", "alabaster (>=0.7.12,<0.8.0)", "commonmark (>=0.9.1,<0.10.0)", "m2r2 (>=0.3.2,<0.4.0)", "mock (>=4.0.3,<5.0.0)", "readthedocs-sphinx-ext (>=2.1.8,<3.0.0)", "recommonmark (>=0.7.1,<0.8.0)", "sphinx-autoapi (>=1.8.4,<2.0.0)", "sphinx-rtd-theme (>=1.0.0,<2.0.0)"] - -[[package]] -name = "pytz" -version = "2022.5" -description = "World timezone definitions, modern and historical" -category = "main" -optional = false -python-versions = "*" -files = [ +pytz = [ {file = "pytz-2022.5-py2.py3-none-any.whl", hash = "sha256:335ab46900b1465e714b4fda4963d87363264eb662aab5e65da039c25f1f5b22"}, {file = "pytz-2022.5.tar.gz", hash = "sha256:c4d88f472f54d615e9cd582a5004d1e5f624854a6a27a6211591c251f22a6914"}, ] - -[[package]] -name = "pytz-deprecation-shim" -version = "0.1.0.post0" -description = "Shims to make deprecation of pytz easier" -category = "main" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -files = [ +pytz-deprecation-shim = [ {file = "pytz_deprecation_shim-0.1.0.post0-py2.py3-none-any.whl", hash = "sha256:8314c9692a636c8eb3bda879b9f119e350e93223ae83e70e80c31675a0fdc1a6"}, {file = "pytz_deprecation_shim-0.1.0.post0.tar.gz", hash = "sha256:af097bae1b616dde5c5744441e2ddc69e74dfdcb0c263129610d85b87445a59d"}, ] - -[package.dependencies] -tzdata = {version = "*", markers = "python_version >= \"3.6\""} - -[[package]] -name = "pyupgrade" -version = "3.1.0" -description = "A tool to automatically upgrade syntax for newer versions." 
-category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +pyupgrade = [ {file = "pyupgrade-3.1.0-py2.py3-none-any.whl", hash = "sha256:77c6101a710be3e24804891e43388cedbee617258e93b09c8c5e58de08617758"}, {file = "pyupgrade-3.1.0.tar.gz", hash = "sha256:7a8d393d85e15e0e2753e90b7b2e173b9d29dfd71e61f93d93e985b242627ed3"}, ] - -[package.dependencies] -tokenize-rt = ">=3.2.0" - -[[package]] -name = "pyyaml" -version = "6.0" -description = "YAML parser and emitter for Python" -category = "main" -optional = false -python-versions = ">=3.6" -files = [ +pyyaml = [ {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, @@ -2190,205 +2917,46 @@ files = [ {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, ] - -[[package]] -name = "reorder-python-imports" -version = "3.9.0" -description = "Tool for reordering python imports" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +reorder-python-imports = [ {file = "reorder_python_imports-3.9.0-py2.py3-none-any.whl", hash = "sha256:3f9c16e8781f54c944756d0d1eb34a8c863554f7a4eb3693f574fe19b1a29b56"}, {file = "reorder_python_imports-3.9.0.tar.gz", hash = "sha256:49292ed537829a6bece9fb3746fc1bbe98f52643be5de01a4e13680268a5b0ec"}, ] - -[package.dependencies] -classify-imports = ">=4.1" - -[[package]] -name = "requests" -version = "2.28.1" -description = "Python HTTP for Humans." -category = "main" -optional = false -python-versions = ">=3.7, <4" -files = [ +requests = [ {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, ] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<3" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<1.27" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "requests-toolbelt" -version = "0.9.1" -description = "A utility belt for advanced users of python-requests" -category = "main" -optional = false -python-versions = "*" -files = [ +requests-toolbelt = [ {file = "requests-toolbelt-0.9.1.tar.gz", hash = "sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0"}, {file = "requests_toolbelt-0.9.1-py2.py3-none-any.whl", hash = "sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f"}, ] - -[package.dependencies] -requests = ">=2.0.1,<3.0.0" - -[[package]] -name = "restrictedpython" -version = "6.0" -description = "RestrictedPython is a defined subset of the Python language which allows to provide a program input into a trusted environment." 
-category = "main" -optional = false -python-versions = ">=3.6, <3.12" -files = [ +restrictedpython = [ {file = "RestrictedPython-6.0-py3-none-any.whl", hash = "sha256:3479303f7bff48a7dedad76f96e7704993c5e86c5adbd67f607295d5352f0fb8"}, {file = "RestrictedPython-6.0.tar.gz", hash = "sha256:405cf0bd9eec2f19b1326b5f48228efe56d6590b4e91826b8cc3b2cd400a96ad"}, ] - -[package.extras] -docs = ["Sphinx", "sphinx-rtd-theme"] -test = ["pytest", "pytest-mock"] - -[[package]] -name = "restructuredtext-lint" -version = "1.4.0" -description = "reStructuredText linter" -category = "dev" -optional = false -python-versions = "*" -files = [ +restructuredtext-lint = [ {file = "restructuredtext_lint-1.4.0.tar.gz", hash = "sha256:1b235c0c922341ab6c530390892eb9e92f90b9b75046063e047cacfb0f050c45"}, ] - -[package.dependencies] -docutils = ">=0.11,<1.0" - -[[package]] -name = "rsa" -version = "4.9" -description = "Pure-Python RSA implementation" -category = "main" -optional = false -python-versions = ">=3.6,<4" -files = [ +rsa = [ {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, ] - -[package.dependencies] -pyasn1 = ">=0.1.3" - -[[package]] -name = "ruamel-yaml" -version = "0.17.21" -description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" -category = "dev" -optional = false -python-versions = ">=3" -files = [ +ruamel-yaml = [ {file = "ruamel.yaml-0.17.21-py3-none-any.whl", hash = "sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7"}, {file = "ruamel.yaml-0.17.21.tar.gz", hash = "sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af"}, ] - -[package.extras] -docs = ["ryd"] -jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] - -[[package]] -name = "safety" -version = "2.3.1" -description = "Checks installed dependencies for known vulnerabilities and licenses." 
-category = "dev" -optional = false -python-versions = "*" -files = [ +safety = [ {file = "safety-2.3.1-py3-none-any.whl", hash = "sha256:8f098d12b607db2756886280e85c28ece8db1bba4f45fc5f981f4663217bd619"}, {file = "safety-2.3.1.tar.gz", hash = "sha256:6e6fcb7d4e8321098cf289f59b65051cafd3467f089c6e57c9f894ae32c23b71"}, ] - -[package.dependencies] -Click = ">=8.0.2" -dparse = ">=0.6.2" -packaging = ">=21.0" -requests = "*" -"ruamel.yaml" = ">=0.17.21" -setuptools = ">=19.3" - -[package.extras] -github = ["jinja2 (>=3.1.0)", "pygithub (>=1.43.3)"] -gitlab = ["python-gitlab (>=1.3.0)"] - -[[package]] -name = "sentry-sdk" -version = "1.10.1" -description = "Python client for Sentry (https://sentry.io)" -category = "main" -optional = false -python-versions = "*" -files = [ +sentry-sdk = [ {file = "sentry-sdk-1.10.1.tar.gz", hash = "sha256:105faf7bd7b7fa25653404619ee261527266b14103fe1389e0ce077bd23a9691"}, {file = "sentry_sdk-1.10.1-py2.py3-none-any.whl", hash = "sha256:06c0fa9ccfdc80d7e3b5d2021978d6eb9351fa49db9b5847cf4d1f2a473414ad"}, ] - -[package.dependencies] -certifi = "*" -urllib3 = {version = ">=1.26.11", markers = "python_version >= \"3.6\""} - -[package.extras] -aiohttp = ["aiohttp (>=3.5)"] -beam = ["apache-beam (>=2.12)"] -bottle = ["bottle (>=0.12.13)"] -celery = ["celery (>=3)"] -chalice = ["chalice (>=1.16.0)"] -django = ["django (>=1.8)"] -falcon = ["falcon (>=1.4)"] -fastapi = ["fastapi (>=0.79.0)"] -flask = ["blinker (>=1.1)", "flask (>=0.11)"] -httpx = ["httpx (>=0.16.0)"] -pure-eval = ["asttokens", "executing", "pure-eval"] -pyspark = ["pyspark (>=2.4.4)"] -quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] -rq = ["rq (>=0.6)"] -sanic = ["sanic (>=0.8)"] -sqlalchemy = ["sqlalchemy (>=1.2)"] -starlette = ["starlette (>=0.19.1)"] -tornado = ["tornado (>=5)"] - -[[package]] -name = "setuptools" -version = "65.5.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ +setuptools = [ {file = "setuptools-65.5.0-py3-none-any.whl", hash = "sha256:f62ea9da9ed6289bfe868cd6845968a2c854d1427f8548d52cae02a42b4f0356"}, {file = "setuptools-65.5.0.tar.gz", hash = "sha256:512e5536220e38146176efb833d4a62aa726b7bbff82cfbc8ba9eaa3996e0b17"}, ] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mock", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] - -[[package]] -name = "simplejson" -version = "3.17.6" -description = "Simple, fast, extensible JSON encoder/decoder for Python" -category = "main" -optional = false -python-versions = ">=2.5, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ +simplejson = [ {file = "simplejson-3.17.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash 
= "sha256:a89acae02b2975b1f8e4974cb8cdf9bf9f6c91162fb8dec50c259ce700f2770a"}, {file = "simplejson-3.17.6-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:82ff356ff91be0ab2293fc6d8d262451eb6ac4fd999244c4b5f863e049ba219c"}, {file = "simplejson-3.17.6-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:0de783e9c2b87bdd75b57efa2b6260c24b94605b5c9843517577d40ee0c3cc8a"}, @@ -2451,293 +3019,68 @@ files = [ {file = "simplejson-3.17.6-cp39-cp39-win_amd64.whl", hash = "sha256:3fe87570168b2ae018391e2b43fbf66e8593a86feccb4b0500d134c998983ccc"}, {file = "simplejson-3.17.6.tar.gz", hash = "sha256:cf98038d2abf63a1ada5730e91e84c642ba6c225b0198c3684151b1f80c5f8a6"}, ] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ +six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] - -[[package]] -name = "smmap" -version = "5.0.0" -description = "A pure Python implementation of a sliding window memory map manager" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ +smmap = [ {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, ] - -[[package]] -name = "snowballstemmer" -version = "2.2.0" -description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." -category = "main" -optional = false -python-versions = "*" -files = [ +snowballstemmer = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, ] - -[[package]] -name = "soupsieve" -version = "2.3.2.post1" -description = "A modern CSS selector implementation for Beautiful Soup." 
-category = "dev" -optional = false -python-versions = ">=3.6" -files = [ +soupsieve = [ {file = "soupsieve-2.3.2.post1-py3-none-any.whl", hash = "sha256:3b2503d3c7084a42b1ebd08116e5f81aadfaea95863628c80a3b774a11b7c759"}, {file = "soupsieve-2.3.2.post1.tar.gz", hash = "sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d"}, ] - -[[package]] -name = "sphinx" -version = "5.3.0" -description = "Python documentation generator" -category = "main" -optional = false -python-versions = ">=3.6" -files = [ +sphinx = [ {file = "Sphinx-5.3.0.tar.gz", hash = "sha256:51026de0a9ff9fc13c05d74913ad66047e104f56a129ff73e174eb5c3ee794b5"}, {file = "sphinx-5.3.0-py3-none-any.whl", hash = "sha256:060ca5c9f7ba57a08a1219e547b269fadf125ae25b06b9fa7f66768efb652d6d"}, ] - -[package.dependencies] -alabaster = ">=0.7,<0.8" -babel = ">=2.9" -colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} -docutils = ">=0.14,<0.20" -imagesize = ">=1.3" -Jinja2 = ">=3.0" -packaging = ">=21.0" -Pygments = ">=2.12" -requests = ">=2.5.0" -snowballstemmer = ">=2.0" -sphinxcontrib-applehelp = "*" -sphinxcontrib-devhelp = "*" -sphinxcontrib-htmlhelp = ">=2.0.0" -sphinxcontrib-jsmath = "*" -sphinxcontrib-qthelp = "*" -sphinxcontrib-serializinghtml = ">=1.1.5" - -[package.extras] -docs = ["sphinxcontrib-websupport"] -lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-bugbear", "flake8-comprehensions", "flake8-simplify", "isort", "mypy (>=0.981)", "sphinx-lint", "types-requests", "types-typed-ast"] -test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"] - -[[package]] -name = "sphinx-autoapi" -version = "2.0.0" -description = "Sphinx API documentation generator" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ +sphinx-autoapi = [ {file = "sphinx-autoapi-2.0.0.tar.gz", hash = "sha256:97dcf1b5b54cd0d8efef867594e4a4f3e2d3a2c0ec1e5a891e0a61bc77046006"}, {file = "sphinx_autoapi-2.0.0-py2.py3-none-any.whl", hash = "sha256:dab2753a38cad907bf4e61473c0da365a26bfbe69fbf5aa6e4f7d48e1cf8a148"}, ] - -[package.dependencies] -astroid = ">=2.7" -Jinja2 = "*" -PyYAML = "*" -sphinx = ">=4.0" -unidecode = "*" - -[package.extras] -docs = ["sphinx", "sphinx-rtd-theme"] -dotnet = ["sphinxcontrib-dotnetdomain"] -go = ["sphinxcontrib-golangdomain"] - -[[package]] -name = "sphinx-autobuild" -version = "2021.3.14" -description = "Rebuild Sphinx documentation on changes, with live-reload in the browser." -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ +sphinx-autobuild = [ {file = "sphinx-autobuild-2021.3.14.tar.gz", hash = "sha256:de1ca3b66e271d2b5b5140c35034c89e47f263f2cd5db302c9217065f7443f05"}, {file = "sphinx_autobuild-2021.3.14-py3-none-any.whl", hash = "sha256:8fe8cbfdb75db04475232f05187c776f46f6e9e04cacf1e49ce81bdac649ccac"}, ] - -[package.dependencies] -colorama = "*" -livereload = "*" -sphinx = "*" - -[package.extras] -test = ["pytest", "pytest-cov"] - -[[package]] -name = "sphinx-basic-ng" -version = "1.0.0b1" -description = "A modern skeleton for Sphinx themes." 
-category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +sphinx-basic-ng = [ {file = "sphinx_basic_ng-1.0.0b1-py3-none-any.whl", hash = "sha256:ade597a3029c7865b24ad0eda88318766bcc2f9f4cef60df7e28126fde94db2a"}, {file = "sphinx_basic_ng-1.0.0b1.tar.gz", hash = "sha256:89374bd3ccd9452a301786781e28c8718e99960f2d4f411845ea75fc7bb5a9b0"}, ] - -[package.dependencies] -sphinx = ">=4.0" - -[package.extras] -docs = ["furo", "ipython", "myst-parser", "sphinx-copybutton", "sphinx-inline-tabs"] - -[[package]] -name = "sphinx-click" -version = "4.3.0" -description = "Sphinx extension that automatically documents click applications" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "sphinx-click-4.3.0.tar.gz", hash = "sha256:bd4db5d3c1bec345f07af07b8e28a76cfc5006d997984e38ae246bbf8b9a3b38"}, - {file = "sphinx_click-4.3.0-py3-none-any.whl", hash = "sha256:23e85a3cb0b728a421ea773699f6acadefae171d1a764a51dd8ec5981503ccbe"}, +sphinx-click = [ + {file = "sphinx-click-4.4.0.tar.gz", hash = "sha256:cc67692bd28f482c7f01531c61b64e9d2f069bfcf3d24cbbb51d4a84a749fa48"}, + {file = "sphinx_click-4.4.0-py3-none-any.whl", hash = "sha256:2821c10a68fc9ee6ce7c92fad26540d8d8c8f45e6d7258f0e4fb7529ae8fab49"}, ] - -[package.dependencies] -click = ">=7.0" -docutils = "*" -sphinx = ">=2.0" - -[[package]] -name = "sphinxcontrib-applehelp" -version = "1.0.2" -description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" -category = "main" -optional = false -python-versions = ">=3.5" -files = [ +sphinxcontrib-applehelp = [ {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, {file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"}, ] - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["pytest"] - -[[package]] -name = "sphinxcontrib-devhelp" -version = "1.0.2" -description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." 
-category = "main" -optional = false -python-versions = ">=3.5" -files = [ +sphinxcontrib-devhelp = [ {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, ] - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["pytest"] - -[[package]] -name = "sphinxcontrib-htmlhelp" -version = "2.0.0" -description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" -category = "main" -optional = false -python-versions = ">=3.6" -files = [ +sphinxcontrib-htmlhelp = [ {file = "sphinxcontrib-htmlhelp-2.0.0.tar.gz", hash = "sha256:f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2"}, {file = "sphinxcontrib_htmlhelp-2.0.0-py2.py3-none-any.whl", hash = "sha256:d412243dfb797ae3ec2b59eca0e52dac12e75a241bf0e4eb861e450d06c6ed07"}, ] - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["html5lib", "pytest"] - -[[package]] -name = "sphinxcontrib-jsmath" -version = "1.0.1" -description = "A sphinx extension which renders display math in HTML via JavaScript" -category = "main" -optional = false -python-versions = ">=3.5" -files = [ +sphinxcontrib-jsmath = [ {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, ] - -[package.extras] -test = ["flake8", "mypy", "pytest"] - -[[package]] -name = "sphinxcontrib-qthelp" -version = "1.0.3" -description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." -category = "main" -optional = false -python-versions = ">=3.5" -files = [ +sphinxcontrib-qthelp = [ {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, ] - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["pytest"] - -[[package]] -name = "sphinxcontrib-serializinghtml" -version = "1.1.5" -description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." 
-category = "main" -optional = false -python-versions = ">=3.5" -files = [ +sphinxcontrib-serializinghtml = [ {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, ] - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["pytest"] - -[[package]] -name = "SpiffWorkflow" -version = "1.2.1" -description = "A workflow framework and BPMN/DMN Processor" -category = "main" -optional = false -python-versions = "*" -files = [] -develop = false - -[package.dependencies] -celery = "*" -configparser = "*" -lxml = "*" - -[package.source] -type = "git" -url = "https://github.com/sartography/SpiffWorkflow" -reference = "main" -resolved_reference = "2ca6ebf800d4ff1d54f3e1c48798a2cb879560f7" - -[[package]] -name = "sqlalchemy" -version = "1.4.42" -description = "Database Abstraction Library" -category = "main" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -files = [ +SpiffWorkflow = [] +sqlalchemy = [ {file = "SQLAlchemy-1.4.42-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:28e881266a172a4d3c5929182fde6bb6fba22ac93f137d5380cc78a11a9dd124"}, {file = "SQLAlchemy-1.4.42-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:ca9389a00f639383c93ed00333ed763812f80b5ae9e772ea32f627043f8c9c88"}, {file = "SQLAlchemy-1.4.42-cp27-cp27m-win32.whl", hash = "sha256:1d0c23ecf7b3bc81e29459c34a3f4c68ca538de01254e24718a7926810dc39a6"}, @@ -2780,125 +3123,28 @@ files = [ {file = "SQLAlchemy-1.4.42-cp39-cp39-win_amd64.whl", hash = "sha256:5f966b64c852592469a7eb759615bbd351571340b8b344f1d3fa2478b5a4c934"}, {file = "SQLAlchemy-1.4.42.tar.gz", hash = "sha256:177e41914c476ed1e1b77fd05966ea88c094053e17a85303c4ce007f88eff363"}, ] - -[package.dependencies] -greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} - -[package.extras] -aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] -asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"] -mssql = ["pyodbc"] -mssql-pymssql = ["pymssql"] -mssql-pyodbc = ["pyodbc"] -mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] -mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] -mysql-connector = ["mysql-connector-python"] -oracle = ["cx-oracle (>=7)", "cx-oracle (>=7,<8)"] -postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] -postgresql-psycopg2binary = ["psycopg2-binary"] -postgresql-psycopg2cffi = ["psycopg2cffi"] -pymysql = ["pymysql", "pymysql (<1)"] -sqlcipher = ["sqlcipher3-binary"] - -[[package]] -name = "sqlalchemy-stubs" -version = "0.4" -description = "" -category = "main" -optional = false -python-versions = "*" -files = [] -develop = false - -[package.dependencies] -mypy = ">=0.790" -typing-extensions = ">=3.7.4" - -[package.source] -type = "git" -url = "https://github.com/burnettk/sqlalchemy-stubs.git" -reference = "scoped-session-delete" 
-resolved_reference = "d1176931684ce5b327539cc9567d4a1cd8ef1efd" - -[[package]] -name = "stevedore" -version = "4.1.0" -description = "Manage dynamic plugins for Python applications" -category = "dev" -optional = false -python-versions = ">=3.8" -files = [ +sqlalchemy-stubs = [] +stevedore = [ {file = "stevedore-4.1.0-py3-none-any.whl", hash = "sha256:3b1cbd592a87315f000d05164941ee5e164899f8fc0ce9a00bb0f321f40ef93e"}, {file = "stevedore-4.1.0.tar.gz", hash = "sha256:02518a8f0d6d29be8a445b7f2ac63753ff29e8f2a2faa01777568d5500d777a6"}, ] - -[package.dependencies] -pbr = ">=2.0.0,<2.1.0 || >2.1.0" - -[[package]] -name = "swagger-ui-bundle" -version = "0.0.9" -description = "swagger_ui_bundle - swagger-ui files in a pip package" -category = "main" -optional = false -python-versions = "*" -files = [ +swagger-ui-bundle = [ {file = "swagger_ui_bundle-0.0.9-py3-none-any.whl", hash = "sha256:cea116ed81147c345001027325c1ddc9ca78c1ee7319935c3c75d3669279d575"}, {file = "swagger_ui_bundle-0.0.9.tar.gz", hash = "sha256:b462aa1460261796ab78fd4663961a7f6f347ce01760f1303bbbdf630f11f516"}, ] - -[package.dependencies] -Jinja2 = ">=2.0" - -[[package]] -name = "tokenize-rt" -version = "5.0.0" -description = "A wrapper around the stdlib `tokenize` which roundtrips." -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +tokenize-rt = [ {file = "tokenize_rt-5.0.0-py2.py3-none-any.whl", hash = "sha256:c67772c662c6b3dc65edf66808577968fb10badfc2042e3027196bed4daf9e5a"}, {file = "tokenize_rt-5.0.0.tar.gz", hash = "sha256:3160bc0c3e8491312d0485171dea861fc160a240f5f5766b72a1165408d10740"}, ] - -[[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" -category = "dev" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ +toml = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +tomli = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] - -[[package]] -name = "tornado" -version = "6.2" -description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
-category = "dev" -optional = false -python-versions = ">= 3.7" -files = [ +tornado = [ {file = "tornado-6.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:20f638fd8cc85f3cbae3c732326e96addff0a15e22d80f049e00121651e82e72"}, {file = "tornado-6.2-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:87dcafae3e884462f90c90ecc200defe5e580a7fbbb4365eda7c7c1eb809ebc9"}, {file = "tornado-6.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba09ef14ca9893954244fd872798b4ccb2367c165946ce2dd7376aebdde8e3ac"}, @@ -2911,286 +3157,83 @@ files = [ {file = "tornado-6.2-cp37-abi3-win_amd64.whl", hash = "sha256:e5f923aa6a47e133d1cf87d60700889d7eae68988704e20c75fb2d65677a8e4b"}, {file = "tornado-6.2.tar.gz", hash = "sha256:9b630419bde84ec666bfd7ea0a4cb2a8a651c2d5cccdbdd1972a0c859dfc3c13"}, ] - -[[package]] -name = "typeguard" -version = "2.13.3" -description = "Run-time type checker for Python" -category = "dev" -optional = false -python-versions = ">=3.5.3" -files = [ +typeguard = [ {file = "typeguard-2.13.3-py3-none-any.whl", hash = "sha256:5e3e3be01e887e7eafae5af63d1f36c849aaa94e3a0112097312aabfa16284f1"}, {file = "typeguard-2.13.3.tar.gz", hash = "sha256:00edaa8da3a133674796cf5ea87d9f4b4c367d77476e185e80251cc13dfbb8c4"}, ] - -[package.extras] -doc = ["sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["mypy", "pytest", "typing-extensions"] - -[[package]] -name = "types-click" -version = "7.1.8" -description = "Typing stubs for click" -category = "main" -optional = false -python-versions = "*" -files = [ +types-click = [ {file = "types-click-7.1.8.tar.gz", hash = "sha256:b6604968be6401dc516311ca50708a0a28baa7a0cb840efd7412f0dbbff4e092"}, {file = "types_click-7.1.8-py3-none-any.whl", hash = "sha256:8cb030a669e2e927461be9827375f83c16b8178c365852c060a34e24871e7e81"}, ] - -[[package]] -name = "types-flask" -version = "1.1.6" -description = "Typing stubs for Flask" -category = "main" -optional = false -python-versions = "*" -files = [ +types-flask = [ {file = "types-Flask-1.1.6.tar.gz", hash = "sha256:aac777b3abfff9436e6b01f6d08171cf23ea6e5be71cbf773aaabb1c5763e9cf"}, {file = "types_Flask-1.1.6-py3-none-any.whl", hash = "sha256:6ab8a9a5e258b76539d652f6341408867298550b19b81f0e41e916825fc39087"}, ] - -[package.dependencies] -types-click = "*" -types-Jinja2 = "*" -types-Werkzeug = "*" - -[[package]] -name = "types-jinja2" -version = "2.11.9" -description = "Typing stubs for Jinja2" -category = "main" -optional = false -python-versions = "*" -files = [ +types-jinja2 = [ {file = "types-Jinja2-2.11.9.tar.gz", hash = "sha256:dbdc74a40aba7aed520b7e4d89e8f0fe4286518494208b35123bcf084d4b8c81"}, {file = "types_Jinja2-2.11.9-py3-none-any.whl", hash = "sha256:60a1e21e8296979db32f9374d8a239af4cb541ff66447bb915d8ad398f9c63b2"}, ] - -[package.dependencies] -types-MarkupSafe = "*" - -[[package]] -name = "types-markupsafe" -version = "1.1.10" -description = "Typing stubs for MarkupSafe" -category = "main" -optional = false -python-versions = "*" -files = [ +types-markupsafe = [ {file = "types-MarkupSafe-1.1.10.tar.gz", hash = "sha256:85b3a872683d02aea3a5ac2a8ef590193c344092032f58457287fbf8e06711b1"}, {file = "types_MarkupSafe-1.1.10-py3-none-any.whl", hash = "sha256:ca2bee0f4faafc45250602567ef38d533e877d2ddca13003b319c551ff5b3cc5"}, ] - -[[package]] -name = "types-pytz" -version = "2022.5.0.0" -description = "Typing stubs for pytz" -category = "main" -optional = false -python-versions = "*" -files = [ +types-pytz = [ {file = "types-pytz-2022.5.0.0.tar.gz", hash = 
"sha256:0c163b15d3e598e6cc7074a99ca9ec72b25dc1b446acc133b827667af0b7b09a"}, {file = "types_pytz-2022.5.0.0-py3-none-any.whl", hash = "sha256:a8e1fe6a1b270fbfaf2553b20ad0f1316707cc320e596da903bb17d7373fed2d"}, ] - -[[package]] -name = "types-pyyaml" -version = "6.0.12.1" -description = "Typing stubs for PyYAML" -category = "main" -optional = false -python-versions = "*" -files = [ +types-pyyaml = [ {file = "types-PyYAML-6.0.12.1.tar.gz", hash = "sha256:70ccaafcf3fb404d57bffc1529fdd86a13e8b4f2cf9fc3ee81a6408ce0ad59d2"}, {file = "types_PyYAML-6.0.12.1-py3-none-any.whl", hash = "sha256:aaf5e51444c13bd34104695a89ad9c48412599a4f615d65a60e649109714f608"}, ] - -[[package]] -name = "types-requests" -version = "2.28.11.2" -description = "Typing stubs for requests" -category = "main" -optional = false -python-versions = "*" -files = [ +types-requests = [ {file = "types-requests-2.28.11.2.tar.gz", hash = "sha256:fdcd7bd148139fb8eef72cf4a41ac7273872cad9e6ada14b11ff5dfdeee60ed3"}, {file = "types_requests-2.28.11.2-py3-none-any.whl", hash = "sha256:14941f8023a80b16441b3b46caffcbfce5265fd14555844d6029697824b5a2ef"}, ] - -[package.dependencies] -types-urllib3 = "<1.27" - -[[package]] -name = "types-urllib3" -version = "1.26.25.1" -description = "Typing stubs for urllib3" -category = "main" -optional = false -python-versions = "*" -files = [ +types-urllib3 = [ {file = "types-urllib3-1.26.25.1.tar.gz", hash = "sha256:a948584944b2412c9a74b9cf64f6c48caf8652cb88b38361316f6d15d8a184cd"}, {file = "types_urllib3-1.26.25.1-py3-none-any.whl", hash = "sha256:f6422596cc9ee5fdf68f9d547f541096a20c2dcfd587e37c804c9ea720bf5cb2"}, ] - -[[package]] -name = "types-werkzeug" -version = "1.0.9" -description = "Typing stubs for Werkzeug" -category = "main" -optional = false -python-versions = "*" -files = [ +types-werkzeug = [ {file = "types-Werkzeug-1.0.9.tar.gz", hash = "sha256:5cc269604c400133d452a40cee6397655f878fc460e03fde291b9e3a5eaa518c"}, {file = "types_Werkzeug-1.0.9-py3-none-any.whl", hash = "sha256:194bd5715a13c598f05c63e8a739328657590943bce941e8a3619a6b5d4a54ec"}, ] - -[[package]] -name = "typing-extensions" -version = "4.4.0" -description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ +typing-extensions = [ {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"}, {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"}, ] - -[[package]] -name = "tzdata" -version = "2022.5" -description = "Provider of IANA time zone data" -category = "main" -optional = false -python-versions = ">=2" -files = [ +tzdata = [ {file = "tzdata-2022.5-py2.py3-none-any.whl", hash = "sha256:323161b22b7802fdc78f20ca5f6073639c64f1a7227c40cd3e19fd1d0ce6650a"}, {file = "tzdata-2022.5.tar.gz", hash = "sha256:e15b2b3005e2546108af42a0eb4ccab4d9e225e2dfbf4f77aad50c70a4b1f3ab"}, ] - -[[package]] -name = "tzlocal" -version = "4.2" -description = "tzinfo object for the local timezone" -category = "main" -optional = false -python-versions = ">=3.6" -files = [ +tzlocal = [ {file = "tzlocal-4.2-py3-none-any.whl", hash = "sha256:89885494684c929d9191c57aa27502afc87a579be5cdd3225c77c463ea043745"}, {file = "tzlocal-4.2.tar.gz", hash = "sha256:ee5842fa3a795f023514ac2d801c4a81d1743bbe642e3940143326b3a00addd7"}, ] - -[package.dependencies] -pytz-deprecation-shim = "*" -tzdata = {version = "*", markers = "platform_system == 
\"Windows\""} - -[package.extras] -devenv = ["black", "pyroma", "pytest-cov", "zest.releaser"] -test = ["pytest (>=4.3)", "pytest-mock (>=3.3)"] - -[[package]] -name = "unidecode" -version = "1.3.6" -description = "ASCII transliterations of Unicode text" -category = "main" -optional = false -python-versions = ">=3.5" -files = [ +unidecode = [ {file = "Unidecode-1.3.6-py3-none-any.whl", hash = "sha256:547d7c479e4f377b430dd91ac1275d593308dce0fc464fb2ab7d41f82ec653be"}, {file = "Unidecode-1.3.6.tar.gz", hash = "sha256:fed09cf0be8cf415b391642c2a5addfc72194407caee4f98719e40ec2a72b830"}, ] - -[[package]] -name = "urllib3" -version = "1.26.12" -description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" -files = [ +urllib3 = [ {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"}, {file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"}, ] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] - -[[package]] -name = "vine" -version = "5.0.0" -description = "Promises, promises, promises." -category = "main" -optional = false -python-versions = ">=3.6" -files = [ +vine = [ {file = "vine-5.0.0-py2.py3-none-any.whl", hash = "sha256:4c9dceab6f76ed92105027c49c823800dd33cacce13bdedc5b914e3514b7fb30"}, {file = "vine-5.0.0.tar.gz", hash = "sha256:7d3b1624a953da82ef63462013bbd271d3eb75751489f9807598e8f340bd637e"}, ] - -[[package]] -name = "virtualenv" -version = "20.16.6" -description = "Virtual Python Environment builder" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ +virtualenv = [ {file = "virtualenv-20.16.6-py3-none-any.whl", hash = "sha256:186ca84254abcbde98180fd17092f9628c5fe742273c02724972a1d8a2035108"}, {file = "virtualenv-20.16.6.tar.gz", hash = "sha256:530b850b523c6449406dfba859d6345e48ef19b8439606c5d74d7d3c9e14d76e"}, ] - -[package.dependencies] -distlib = ">=0.3.6,<1" -filelock = ">=3.4.1,<4" -platformdirs = ">=2.4,<3" - -[package.extras] -docs = ["proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-argparse (>=0.3.2)", "sphinx-rtd-theme (>=1)", "towncrier (>=22.8)"] -testing = ["coverage (>=6.2)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=21.3)", "pytest (>=7.0.1)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.6.1)", "pytest-randomly (>=3.10.3)", "pytest-timeout (>=2.1)"] - -[[package]] -name = "wcwidth" -version = "0.2.5" -description = "Measures the displayed width of unicode strings in a terminal" -category = "main" -optional = false -python-versions = "*" -files = [ +wcwidth = [ {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, ] - -[[package]] -name = "werkzeug" -version = "2.2.2" -description = "The comprehensive WSGI web application library." 
-category = "main" -optional = false -python-versions = ">=3.7" -files = [ +werkzeug = [ {file = "Werkzeug-2.2.2-py3-none-any.whl", hash = "sha256:f979ab81f58d7318e064e99c4506445d60135ac5cd2e177a2de0089bfd4c9bd5"}, {file = "Werkzeug-2.2.2.tar.gz", hash = "sha256:7ea2d48322cc7c0f8b3a215ed73eabd7b5d75d0b50e31ab006286ccff9e00b8f"}, ] - -[package.dependencies] -MarkupSafe = ">=2.1.1" - -[package.extras] -watchdog = ["watchdog"] - -[[package]] -name = "wrapt" -version = "1.14.1" -description = "Module for decorators, wrappers and monkey patching." -category = "main" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" -files = [ +wrapt = [ {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"}, {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"}, {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28"}, @@ -3256,54 +3299,11 @@ files = [ {file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"}, {file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"}, ] - -[[package]] -name = "wtforms" -version = "3.0.1" -description = "Form validation and rendering for Python web development." -category = "main" -optional = false -python-versions = ">=3.7" -files = [ +wtforms = [ {file = "WTForms-3.0.1-py3-none-any.whl", hash = "sha256:837f2f0e0ca79481b92884962b914eba4e72b7a2daaf1f939c890ed0124b834b"}, {file = "WTForms-3.0.1.tar.gz", hash = "sha256:6b351bbb12dd58af57ffef05bc78425d08d1914e0fd68ee14143b7ade023c5bc"}, ] - -[package.dependencies] -MarkupSafe = "*" - -[package.extras] -email = ["email-validator"] - -[[package]] -name = "xdoctest" -version = "1.1.0" -description = "A rewrite of the builtin doctest module" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ +xdoctest = [ {file = "xdoctest-1.1.0-py3-none-any.whl", hash = "sha256:da330c4dacee51f3c785820bc743188fb6f7c64c5fa1c54bff8836b3cf23d69b"}, {file = "xdoctest-1.1.0.tar.gz", hash = "sha256:0fd4fad7932f0a2f082dfdfb857dd6ca41603757586c39b1e5b4d333fc389f8a"}, ] - -[package.dependencies] -colorama = {version = "*", optional = true, markers = "platform_system == \"Windows\" and extra == \"colors\""} -Pygments = {version = "*", optional = true, markers = "python_version >= \"3.5.0\" and extra == \"colors\""} -six = "*" - -[package.extras] -all = ["IPython", "IPython", "Pygments", "Pygments", "attrs", "cmake", "codecov", "colorama", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert", "ninja", "pybind11", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest-cov", "pytest-cov", "pytest-cov", "pytest-cov", "scikit-build", "six", "typing"] -all-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "cmake (==3.21.2)", "codecov (==2.0.15)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client 
(==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "ninja (==1.10.2)", "pybind11 (==2.7.1)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==2.8.1)", "pytest-cov (==2.8.1)", "pytest-cov (==2.9.0)", "pytest-cov (==3.0.0)", "scikit-build (==0.11.1)", "six (==1.11.0)", "typing (==3.7.4)"] -colors = ["Pygments", "Pygments", "colorama"] -jupyter = ["IPython", "IPython", "attrs", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert"] -optional = ["IPython", "IPython", "Pygments", "Pygments", "attrs", "colorama", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert", "pyflakes", "tomli"] -optional-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "pyflakes (==2.2.0)", "tomli (==0.2.0)"] -runtime-strict = ["six (==1.11.0)"] -tests = ["cmake", "codecov", "ninja", "pybind11", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest-cov", "pytest-cov", "pytest-cov", "pytest-cov", "scikit-build", "typing"] -tests-strict = ["cmake (==3.21.2)", "codecov (==2.0.15)", "ninja (==1.10.2)", "pybind11 (==2.7.1)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==2.8.1)", "pytest-cov (==2.8.1)", "pytest-cov (==2.9.0)", "pytest-cov (==3.0.0)", "scikit-build (==0.11.1)", "typing (==3.7.4)"] - -[metadata] -lock-version = "2.0" -python-versions = ">=3.11,<3.12" -content-hash = "218d9e84c83ac2b9953fa5e18ee39879d2573fc749900887851be6d9ec32e63d" From ed1b45c4537475fead662512f7d2727c2370b313 Mon Sep 17 00:00:00 2001 From: burnettk Date: Thu, 16 Mar 2023 23:43:59 -0400 Subject: [PATCH 044/162] see if this still works --- .github/workflows/docker_image_for_main_builds.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/docker_image_for_main_builds.yml b/.github/workflows/docker_image_for_main_builds.yml index acaf2c90..ec2da085 100644 --- a/.github/workflows/docker_image_for_main_builds.yml +++ b/.github/workflows/docker_image_for_main_builds.yml @@ -54,7 +54,7 @@ jobs: - name: Get current date id: date - run: echo "date=$(date -u +'%Y-%m-%d_%H-%M-%S')" >> $GITHUB_OUTPUT + run: echo "date=$(date -u +'%Y-%m-%d_%H-%M-%S')" >> "$GITHUB_OUTPUT" - name: Extract metadata (tags, labels) for Docker id: meta uses: docker/metadata-action@v4.3.0 @@ -72,8 +72,8 @@ jobs: push: true tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} - - name: Adding markdown - run: echo 'TAGS ${{ steps.meta.outputs.tags }}' >> $GITHUB_STEP_SUMMARY + - run: echo 'TAGS' >> "$GITHUB_STEP_SUMMARY" + - run: echo 'for tag in ${{ steps.meta.outputs.tags }}; do echo "* $tag"; done' >> "$GITHUB_STEP_SUMMARY" create_backend_docker_image: runs-on: ubuntu-latest @@ -95,7 +95,7 @@ jobs: - name: Get current date id: date - run: echo 
"date=$(date -u +'%Y-%m-%d_%H-%M-%S')" >> $GITHUB_OUTPUT + run: echo "date=$(date -u +'%Y-%m-%d_%H-%M-%S')" >> "$GITHUB_OUTPUT" - name: Extract metadata (tags, labels) for Docker id: meta uses: docker/metadata-action@v4.3.0 @@ -114,4 +114,4 @@ jobs: tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} - name: Adding markdown - run: echo 'TAGS ${{ steps.meta.outputs.tags }}' >> $GITHUB_STEP_SUMMARY + run: echo 'TAGS ${{ steps.meta.outputs.tags }}' >> "$GITHUB_STEP_SUMMARY" From 121793307bdf36df19bb21b880ef5c7ad5358c19 Mon Sep 17 00:00:00 2001 From: burnettk Date: Fri, 17 Mar 2023 09:42:38 -0400 Subject: [PATCH 045/162] add user --- .../realm_exports/spiffworkflow-realm.json | 73 ++++++++++++------- .../keycloak/test_user_lists/status | 1 + 2 files changed, 49 insertions(+), 25 deletions(-) diff --git a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json index d44353b8..99e651b9 100644 --- a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json +++ b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json @@ -396,7 +396,7 @@ "otpPolicyLookAheadWindow" : 1, "otpPolicyPeriod" : 30, "otpPolicyCodeReusable" : false, - "otpSupportedApplications" : [ "totpAppGoogleName", "totpAppFreeOTPName" ], + "otpSupportedApplications" : [ "totpAppFreeOTPName", "totpAppGoogleName" ], "webAuthnPolicyRpEntityName" : "keycloak", "webAuthnPolicySignatureAlgorithms" : [ "ES256" ], "webAuthnPolicyRpId" : "", @@ -991,6 +991,29 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] + }, { + "id" : "7b86b997-de98-478c-8550-cfca65e40c33", + "createdTimestamp" : 1679060366901, + "username" : "core18.contributor", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "core18.contributor@status.im", + "attributes" : { + "spiffworkflow-employeeid" : [ "233" ] + }, + "credentials" : [ { + "id" : "55ca2bd7-6f60-4f04-be21-df6300ca9442", + "type" : "password", + "createdDate" : 1679060366954, + "secretData" : "{\"value\":\"hC/O8LJ8/y/nXLmRFgRazOX9PXMHkowYH1iHUB4Iw9jzc8IMMv8dFrxu7XBklfyz7CPc1bmgl0k29jygRZYHlg==\",\"salt\":\"4R17tmLrHWyFAMvrfLMETQ==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] }, { "id" : "3b81b45e-759b-4d7a-aa90-adf7b447208c", "createdTimestamp" : 1676302140358, @@ -4601,7 +4624,7 @@ "subType" : "authenticated", "subComponents" : { }, "config" : { - "allowed-protocol-mapper-types" : [ "saml-role-list-mapper", "oidc-address-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-user-attribute-mapper", "oidc-usermodel-attribute-mapper", "saml-user-property-mapper", "oidc-usermodel-property-mapper", "oidc-full-name-mapper" ] + "allowed-protocol-mapper-types" : [ "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-attribute-mapper", "oidc-address-mapper", "saml-user-attribute-mapper", "oidc-full-name-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper", "saml-user-property-mapper" ] } }, { "id" : "d68e938d-dde6-47d9-bdc8-8e8523eb08cd", @@ -4619,7 +4642,7 @@ "subType" : "anonymous", "subComponents" : { }, "config" : { - "allowed-protocol-mapper-types" : [ "oidc-usermodel-attribute-mapper", "oidc-full-name-mapper", "oidc-sha256-pairwise-sub-mapper", 
"oidc-usermodel-property-mapper", "saml-user-property-mapper", "oidc-address-mapper", "saml-user-attribute-mapper", "saml-role-list-mapper" ] + "allowed-protocol-mapper-types" : [ "saml-user-property-mapper", "saml-user-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-role-list-mapper", "oidc-full-name-mapper", "oidc-usermodel-attribute-mapper", "oidc-address-mapper", "oidc-usermodel-property-mapper" ] } }, { "id" : "3854361d-3fe5-47fb-9417-a99592e3dc5c", @@ -4709,7 +4732,7 @@ "internationalizationEnabled" : false, "supportedLocales" : [ ], "authenticationFlows" : [ { - "id" : "c54f2b16-9254-481a-9997-fb6cafaa2c00", + "id" : "38a6b336-b026-46be-a8be-e8ff7b9da407", "alias" : "Account verification options", "description" : "Method with which to verity the existing account", "providerId" : "basic-flow", @@ -4731,7 +4754,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "eae97d77-649e-4475-a0a3-57fea93a6b5a", + "id" : "eb9fe753-cd35-4e65-bb34-e83ba7059566", "alias" : "Authentication Options", "description" : "Authentication options.", "providerId" : "basic-flow", @@ -4760,7 +4783,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "1fe6063d-c996-44ae-a082-c11d35b4f9ff", + "id" : "aa9c74f7-0426-4440-907f-4aa0f999eb1e", "alias" : "Browser - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -4782,7 +4805,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "61f58306-7a2f-46ad-994f-04b5eb2a8146", + "id" : "eb2a0849-c316-46bc-8b06-fd0cc50e3f32", "alias" : "Direct Grant - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -4804,7 +4827,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "3b4f8b2f-cf0f-45d8-9105-65b1b3d088d5", + "id" : "8f064003-823b-4be1-aa66-7324bf38c741", "alias" : "First broker login - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -4826,7 +4849,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "211cd18a-4f93-4b60-ba6f-ae55860a0dbc", + "id" : "eef22678-b09c-4ca8-bdcf-90ea44ff0120", "alias" : "Handle Existing Account", "description" : "Handle what to do if there is existing account with same email/username like authenticated identity provider", "providerId" : "basic-flow", @@ -4848,7 +4871,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "7c1d5fb9-14f1-4603-bfec-449f8d98e1ea", + "id" : "4367f263-ef2c-426e-b5cd-49fff868ea1a", "alias" : "Reset - Conditional OTP", "description" : "Flow to determine if the OTP should be reset or not. 
Set to REQUIRED to force.", "providerId" : "basic-flow", @@ -4870,7 +4893,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "4456ff81-c720-4a78-9096-12c42902da8b", + "id" : "b2e9c608-1779-4c03-b32a-03c77450abae", "alias" : "User creation or linking", "description" : "Flow for the existing/non-existing user alternatives", "providerId" : "basic-flow", @@ -4893,7 +4916,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "8bb53624-acec-447a-a768-532222ff2e8f", + "id" : "a8c79324-1881-4bb0-a8a2-83dfd54cacd1", "alias" : "Verify Existing Account by Re-authentication", "description" : "Reauthentication of existing account", "providerId" : "basic-flow", @@ -4915,7 +4938,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "10a8b52f-b5a1-45ab-aeb6-26963d2c4ec4", + "id" : "d1aa83c6-da36-4cb6-b6ed-f6ec556df614", "alias" : "browser", "description" : "browser based authentication", "providerId" : "basic-flow", @@ -4951,7 +4974,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "6ca05d13-7efc-43a4-8569-e7d45cb6db57", + "id" : "2afecfef-4bfb-4842-b338-7ed032a618d2", "alias" : "clients", "description" : "Base authentication for clients", "providerId" : "client-flow", @@ -4987,7 +5010,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "2439ca39-9c37-4174-9b26-787604440ad6", + "id" : "34dc1854-4969-4065-90e6-fef38b0dea98", "alias" : "direct grant", "description" : "OpenID Connect Resource Owner Grant", "providerId" : "basic-flow", @@ -5016,7 +5039,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "8c5814fd-61c0-40d3-9176-332b4558afb3", + "id" : "40557323-dbbc-48ee-9ed1-748b11c9628d", "alias" : "docker auth", "description" : "Used by Docker clients to authenticate against the IDP", "providerId" : "basic-flow", @@ -5031,7 +5054,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "104b06aa-5ce5-490b-9945-0d032d4d521b", + "id" : "d18b5c50-39fa-4b11-a7d2-0e6768e275c1", "alias" : "first broker login", "description" : "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account", "providerId" : "basic-flow", @@ -5054,7 +5077,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "94dc8a72-c455-4fa1-abeb-ca7f248e24a6", + "id" : "976be80d-a88b-412c-8ad2-9ebe427793d4", "alias" : "forms", "description" : "Username, password, otp and other auth forms.", "providerId" : "basic-flow", @@ -5076,7 +5099,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "3ee49693-4049-48ff-9c4d-7ffe6507779d", + "id" : "83b3a411-ff7c-4cba-845a-9554c536d6b1", "alias" : "http challenge", "description" : "An authentication flow based on challenge-response HTTP Authentication Schemes", "providerId" : "basic-flow", @@ -5098,7 +5121,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "444e348b-72ed-49e7-949f-b79fc08066d2", + "id" : "1cb835a6-b38c-4f29-a6d8-d04d0a84d05e", "alias" : "registration", "description" : "registration flow", "providerId" : "basic-flow", @@ -5114,7 +5137,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "b37ea96c-603f-4096-bca4-5f819c243aaf", + "id" : "7ec06c82-6802-4ff4-a3ab-9b6a0b8dbc4b", "alias" : "registration form", "description" : "registration form", "providerId" : "form-flow", @@ -5150,7 +5173,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "6471829e-0771-4bd7-aa62-797eda24d5c2", + "id" : "f3bc2f7b-2074-4d93-9578-3abf648a6681", "alias" : "reset credentials", "description" : "Reset credentials for a user if they forgot their password or something", "providerId" : "basic-flow", @@ -5186,7 +5209,7 @@ "userSetupAllowed" : false } ] }, { - "id" : 
"c743556b-fdfc-4615-8154-a8ad4019dfaa", + "id" : "e62e031b-9922-4682-b867-bc5c3a4a7e99", "alias" : "saml ecp", "description" : "SAML ECP Profile Authentication Flow", "providerId" : "basic-flow", @@ -5202,13 +5225,13 @@ } ] } ], "authenticatorConfig" : [ { - "id" : "92245b69-55b4-4bc3-98f1-03ef168f009e", + "id" : "c449f0aa-5f3c-4107-9f04-3222fa93a486", "alias" : "create unique user config", "config" : { "require.password.update.after.registration" : "false" } }, { - "id" : "3e657993-a0fc-4073-88d5-882310927e19", + "id" : "f7a6ed54-0ab8-4f29-9877-960bd65bf394", "alias" : "review profile config", "config" : { "update.profile.on.first.login" : "missing" diff --git a/spiffworkflow-backend/keycloak/test_user_lists/status b/spiffworkflow-backend/keycloak/test_user_lists/status index 7b76e707..5af7736d 100644 --- a/spiffworkflow-backend/keycloak/test_user_lists/status +++ b/spiffworkflow-backend/keycloak/test_user_lists/status @@ -21,6 +21,7 @@ core14.contributor@status.im,229 core15.contributor@status.im,230 core16.contributor@status.im,231 core17.contributor@status.im,232 +core18.contributor@status.im,233 core2.contributor@status.im,156 core3.contributor@status.im,157 core4.contributor@status.im,158 From 8dc7c5fb2fb6dead103442c96d511eaedb5a6e1d Mon Sep 17 00:00:00 2001 From: jasquat Date: Fri, 17 Mar 2023 10:00:59 -0400 Subject: [PATCH 046/162] added bpmn_name columns to definition tables and added test for simple logs as well --- .../{8ee0f1c23cc7_.py => 8dce75b80bfd_.py} | 12 +++- .../models/bpmn_process_definition.py | 1 + .../models/task_definition.py | 2 + .../routes/process_instances_controller.py | 27 +++++++- .../services/process_instance_processor.py | 4 ++ .../manual_task_with_subprocesses.bpmn | 4 +- .../test_process_to_call.bpmn | 4 +- .../tests/data/simple_form/simple_form.bpmn | 68 +++++++++++++------ .../integration/test_logging_service.py | 65 ++++++++++++++++-- .../unit/test_process_instance_processor.py | 12 ++-- 10 files changed, 162 insertions(+), 37 deletions(-) rename spiffworkflow-backend/migrations/versions/{8ee0f1c23cc7_.py => 8dce75b80bfd_.py} (97%) diff --git a/spiffworkflow-backend/migrations/versions/8ee0f1c23cc7_.py b/spiffworkflow-backend/migrations/versions/8dce75b80bfd_.py similarity index 97% rename from spiffworkflow-backend/migrations/versions/8ee0f1c23cc7_.py rename to spiffworkflow-backend/migrations/versions/8dce75b80bfd_.py index 47641e3c..6618a304 100644 --- a/spiffworkflow-backend/migrations/versions/8ee0f1c23cc7_.py +++ b/spiffworkflow-backend/migrations/versions/8dce75b80bfd_.py @@ -1,8 +1,8 @@ """empty message -Revision ID: 8ee0f1c23cc7 +Revision ID: 8dce75b80bfd Revises: -Create Date: 2023-03-16 16:24:47.364768 +Create Date: 2023-03-17 09:08:24.146736 """ from alembic import op @@ -10,7 +10,7 @@ import sqlalchemy as sa from sqlalchemy.dialects import mysql # revision identifiers, used by Alembic. 
-revision = '8ee0f1c23cc7' +revision = '8dce75b80bfd' down_revision = None branch_labels = None depends_on = None @@ -22,6 +22,7 @@ def upgrade(): sa.Column('id', sa.Integer(), nullable=False), sa.Column('hash', sa.String(length=255), nullable=False), sa.Column('bpmn_identifier', sa.String(length=255), nullable=False), + sa.Column('bpmn_name', sa.String(length=255), nullable=True), sa.Column('properties_json', sa.JSON(), nullable=False), sa.Column('type', sa.String(length=32), nullable=True), sa.Column('bpmn_version_control_type', sa.String(length=50), nullable=True), @@ -31,6 +32,7 @@ def upgrade(): sa.PrimaryKeyConstraint('id') ) op.create_index(op.f('ix_bpmn_process_definition_bpmn_identifier'), 'bpmn_process_definition', ['bpmn_identifier'], unique=False) + op.create_index(op.f('ix_bpmn_process_definition_bpmn_name'), 'bpmn_process_definition', ['bpmn_name'], unique=False) op.create_index(op.f('ix_bpmn_process_definition_hash'), 'bpmn_process_definition', ['hash'], unique=True) op.create_table('correlation_property_cache', sa.Column('id', sa.Integer(), nullable=False), @@ -187,6 +189,7 @@ def upgrade(): sa.Column('id', sa.Integer(), nullable=False), sa.Column('bpmn_process_definition_id', sa.Integer(), nullable=False), sa.Column('bpmn_identifier', sa.String(length=255), nullable=False), + sa.Column('bpmn_name', sa.String(length=255), nullable=True), sa.Column('properties_json', sa.JSON(), nullable=False), sa.Column('typename', sa.String(length=255), nullable=False), sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True), @@ -196,6 +199,7 @@ def upgrade(): sa.UniqueConstraint('bpmn_process_definition_id', 'bpmn_identifier', name='task_definition_unique') ) op.create_index(op.f('ix_task_definition_bpmn_identifier'), 'task_definition', ['bpmn_identifier'], unique=False) + op.create_index(op.f('ix_task_definition_bpmn_name'), 'task_definition', ['bpmn_name'], unique=False) op.create_table('user_group_assignment', sa.Column('id', sa.Integer(), nullable=False), sa.Column('user_id', sa.Integer(), nullable=False), @@ -427,6 +431,7 @@ def downgrade(): op.drop_table('permission_assignment') op.drop_table('user_group_assignment_waiting') op.drop_table('user_group_assignment') + op.drop_index(op.f('ix_task_definition_bpmn_name'), table_name='task_definition') op.drop_index(op.f('ix_task_definition_bpmn_identifier'), table_name='task_definition') op.drop_table('task_definition') op.drop_table('secret') @@ -453,6 +458,7 @@ def downgrade(): op.drop_table('group') op.drop_table('correlation_property_cache') op.drop_index(op.f('ix_bpmn_process_definition_hash'), table_name='bpmn_process_definition') + op.drop_index(op.f('ix_bpmn_process_definition_bpmn_name'), table_name='bpmn_process_definition') op.drop_index(op.f('ix_bpmn_process_definition_bpmn_identifier'), table_name='bpmn_process_definition') op.drop_table('bpmn_process_definition') # ### end Alembic commands ### diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process_definition.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process_definition.py index 4e7744ef..75901ae2 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process_definition.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process_definition.py @@ -21,6 +21,7 @@ class BpmnProcessDefinitionModel(SpiffworkflowBaseDBModel): hash: str = db.Column(db.String(255), nullable=False, index=True, unique=True) bpmn_identifier: str = db.Column(db.String(255), nullable=False, index=True) + bpmn_name: 
str = db.Column(db.String(255), nullable=True, index=True) properties_json: dict = db.Column(db.JSON, nullable=False) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/task_definition.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/task_definition.py index 358b7c1c..6868f2cf 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/task_definition.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/task_definition.py @@ -28,6 +28,8 @@ class TaskDefinitionModel(SpiffworkflowBaseDBModel): bpmn_process_definition = relationship(BpmnProcessDefinitionModel) bpmn_identifier: str = db.Column(db.String(255), nullable=False, index=True) + bpmn_name: str = db.Column(db.String(255), nullable=True, index=True) + properties_json: dict = db.Column(db.JSON, nullable=False) typename: str = db.Column(db.String(255), nullable=False) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index 86764e45..a63d8e25 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -235,20 +235,41 @@ def process_instance_log_list( # ) # .paginate(page=page, per_page=per_page, error_out=False) # ) - log_query = TaskModel.query.filter_by(process_instance_id=process_instance.id) - logs = ( - log_query.order_by(TaskModel.end_in_seconds.desc()) # type: ignore + log_query = ( + TaskModel.query.filter_by(process_instance_id=process_instance.id) .join(TaskDefinitionModel, TaskDefinitionModel.id == TaskModel.task_definition_id) .join( BpmnProcessDefinitionModel, BpmnProcessDefinitionModel.id == TaskDefinitionModel.bpmn_process_definition_id ) + ) + if not detailed: + log_query = log_query.filter( + # 1. this was the previous implementation, where we only show completed tasks and skipped tasks. + # maybe we want to iterate on this in the future (in a third tab under process instance logs?) + # or_( + # SpiffLoggingModel.message.in_(["State change to COMPLETED"]), # type: ignore + # SpiffLoggingModel.message.like("Skipped task %"), # type: ignore + # ) + # 2. We included ["End Event", "Default Start Event"] along with Default Throwing Event, but feb 2023 + # we decided to remove them, since they get really chatty when there are lots of subprocesses and call activities. 
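+            # 3. Current behavior: the simple (non-detailed) view keeps only
+            #    completed Intermediate Throw Events (the and_() filter below),
+            #    which avoids the chatter from subprocesses and call activities
+            #    noted above.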
+ and_( + TaskModel.state.in_(["COMPLETED"]), # type: ignore + TaskDefinitionModel.typename.in_(["IntermediateThrowEvent"]), # type: ignore + ) + ) + + logs = ( + log_query.order_by(TaskModel.end_in_seconds.desc()) # type: ignore .outerjoin(HumanTaskModel, HumanTaskModel.task_model_id == TaskModel.id) .outerjoin(UserModel, UserModel.id == HumanTaskModel.completed_by_user_id) .add_columns( TaskModel.guid.label("spiff_task_guid"), # type: ignore UserModel.username, BpmnProcessDefinitionModel.bpmn_identifier.label("bpmn_process_definition_identifier"), # type: ignore + BpmnProcessDefinitionModel.bpmn_name.label("bpmn_process_definition_name"), # type: ignore TaskDefinitionModel.bpmn_identifier.label("task_definition_identifier"), # type: ignore + TaskDefinitionModel.bpmn_name.label("task_definition_name"), # type: ignore + TaskDefinitionModel.typename.label("bpmn_type"), # type: ignore ) .paginate(page=page, per_page=per_page, error_out=False) ) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 0aac218f..768ebc22 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -996,6 +996,7 @@ class ProcessInstanceProcessor: store_bpmn_definition_mappings: bool = False, ) -> BpmnProcessDefinitionModel: process_bpmn_identifier = process_bpmn_properties["name"] + process_bpmn_name = process_bpmn_properties["description"] new_hash_digest = sha256(json.dumps(process_bpmn_properties, sort_keys=True).encode("utf8")).hexdigest() bpmn_process_definition: Optional[BpmnProcessDefinitionModel] = BpmnProcessDefinitionModel.query.filter_by( hash=new_hash_digest @@ -1006,6 +1007,7 @@ class ProcessInstanceProcessor: bpmn_process_definition = BpmnProcessDefinitionModel( hash=new_hash_digest, bpmn_identifier=process_bpmn_identifier, + bpmn_name=process_bpmn_name, properties_json=process_bpmn_properties, ) db.session.add(bpmn_process_definition) @@ -1016,9 +1018,11 @@ class ProcessInstanceProcessor: ) for task_bpmn_identifier, task_bpmn_properties in task_specs.items(): + task_bpmn_name = task_bpmn_properties["description"] task_definition = TaskDefinitionModel( bpmn_process_definition=bpmn_process_definition, bpmn_identifier=task_bpmn_identifier, + bpmn_name=task_bpmn_name, properties_json=task_bpmn_properties, typename=task_bpmn_properties["typename"], ) diff --git a/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn b/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn index 11d2dbb0..939c8c0b 100644 --- a/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn +++ b/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn @@ -16,7 +16,7 @@ Flow_09gjylo - + Flow_0stlaxe Flow_1fktmf7 set_in_top_level_script = 1 @@ -35,7 +35,7 @@ Flow_1b4o55k - + Flow_00k1tii Flow_1b4o55k set_in_top_level_subprocess = 1 diff --git a/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/test_process_to_call.bpmn b/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/test_process_to_call.bpmn index 25b37c61..299f078e 100644 --- a/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/test_process_to_call.bpmn +++ 
b/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/test_process_to_call.bpmn @@ -1,6 +1,6 @@ - + Flow_06g687y @@ -9,7 +9,7 @@ Flow_01e21r0 - + Flow_06g687y Flow_01e21r0 set_in_test_process_to_call_script = 1 diff --git a/spiffworkflow-backend/tests/data/simple_form/simple_form.bpmn b/spiffworkflow-backend/tests/data/simple_form/simple_form.bpmn index a2f29fd3..01dbda50 100644 --- a/spiffworkflow-backend/tests/data/simple_form/simple_form.bpmn +++ b/spiffworkflow-backend/tests/data/simple_form/simple_form.bpmn @@ -6,9 +6,8 @@ - Flow_1boyhcj + Flow_1scft9v - Hello {{ name }} @@ -16,10 +15,9 @@ Department: {{ department }} user_completing_task = get_last_user_completing_task("Process_WithForm", "Activity_SimpleForm") - Flow_1ly1khd - Flow_1boyhcj + Flow_028o7v5 + Flow_18ytjgo - @@ -29,36 +27,68 @@ Department: {{ department }} process_initiator_user = get_process_initiator_user() Flow_0smvjir - Flow_1ly1khd + Flow_163ufsx + + Flow_163ufsx + Flow_028o7v5 + + + + Flow_18ytjgo + Flow_1scft9v + + + + - - - - - - - + + + + + + + + + + + + + + + + + + + - - - - - + - + + + + + + + + + + + + + diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py index df13ceb6..f277cad5 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py @@ -1,9 +1,12 @@ """Test_logging_service.""" +from uuid import UUID + from flask.app import Flask from flask.testing import FlaskClient from tests.spiffworkflow_backend.helpers.base_test import BaseTest from tests.spiffworkflow_backend.helpers.test_data import load_test_spec +from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.services.authorization_service import AuthorizationService from spiffworkflow_backend.services.process_instance_processor import ( @@ -15,9 +18,7 @@ from spiffworkflow_backend.services.process_instance_service import ( class TestLoggingService(BaseTest): - """Test logging service.""" - - def test_logging_service_spiff_logger( + def test_logging_service_detailed_logs( self, app: Flask, client: FlaskClient, @@ -58,7 +59,7 @@ class TestLoggingService(BaseTest): assert log_response.status_code == 200 assert log_response.json logs: list = log_response.json["results"] - assert len(logs) == 7 + assert len(logs) == 9 for log in logs: assert log["process_instance_id"] == process_instance.id @@ -67,9 +68,65 @@ class TestLoggingService(BaseTest): "end_in_seconds", "spiff_task_guid", "bpmn_process_definition_identifier", + "bpmn_process_definition_name", "task_definition_identifier", + "task_definition_name", + "bpmn_type", ]: assert key in log.keys() if log["task_definition_identifier"] == "Activity_SimpleForm": assert log["username"] == initiator_user.username + + def test_logging_service_simple_logs( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + initiator_user = self.find_or_create_user("initiator_user") + assert initiator_user.principal is not None + AuthorizationService.import_permissions_from_yaml_file() + + process_model = load_test_spec( + process_model_id="misc/category_number_one/simple_form", + # 
bpmn_file_name="simp.bpmn", + process_model_source_directory="simple_form", + ) + process_instance = self.create_process_instance_from_process_model( + process_model=process_model, user=initiator_user + ) + processor = ProcessInstanceProcessor(process_instance) + processor.do_engine_steps(save=True) + + assert len(process_instance.active_human_tasks) == 1 + human_task = process_instance.active_human_tasks[0] + assert len(human_task.potential_owners) == 1 + assert human_task.potential_owners[0] == initiator_user + + spiff_task = processor.__class__.get_task_by_bpmn_identifier( + human_task.task_name, processor.bpmn_process_instance + ) + ProcessInstanceService.complete_form_task(processor, spiff_task, {"name": "HEY"}, initiator_user, human_task) + + process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() + processor = ProcessInstanceProcessor(process_instance) + human_task_one = process_instance.active_human_tasks[0] + spiff_manual_task = processor.bpmn_process_instance.get_task(UUID(human_task_one.task_id)) + ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) + + headers = self.logged_in_headers(with_super_admin_user) + log_response = client.get( + f"/v1.0/logs/{self.modify_process_identifier_for_path_param(process_model.id)}/{process_instance.id}?detailed=false", + headers=headers, + ) + assert log_response.status_code == 200 + assert log_response.json + logs: list = log_response.json["results"] + assert len(logs) == 2 + + for log in logs: + assert log["process_instance_id"] == process_instance.id + assert log["bpmn_type"] == "IntermediateThrowEvent" diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index a1cdec22..70f97328 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -336,9 +336,9 @@ class TestProcessInstanceProcessor(BaseTest): spiff_tasks_checked_once: list = [] # TODO: also check task data here from the spiff_task directly to ensure we hydrated spiff correctly - def assert_spiff_task_is_in_process(spiff_task_name: str, bpmn_process_identifier: str) -> None: - if spiff_task.task_spec.name == spiff_task_name: - base_failure_message = f"Failed on {bpmn_process_identifier} - {spiff_task_name}." + def assert_spiff_task_is_in_process(spiff_task_identifier: str, bpmn_process_identifier: str) -> None: + if spiff_task.task_spec.name == spiff_task_identifier: + base_failure_message = f"Failed on {bpmn_process_identifier} - {spiff_task_identifier}." 
expected_python_env_data = expected_task_data[spiff_task.task_spec.name] if spiff_task.task_spec.name in spiff_tasks_checked_once: expected_python_env_data = expected_task_data[f"{spiff_task.task_spec.name}_second"] @@ -347,9 +347,12 @@ class TestProcessInstanceProcessor(BaseTest): assert task_model.start_in_seconds is not None assert task_model.end_in_seconds is not None assert task_model.task_definition_id is not None + task_definition = task_model.task_definition - assert task_definition.bpmn_identifier == spiff_task_name + assert task_definition.bpmn_identifier == spiff_task_identifier + assert task_definition.bpmn_name == spiff_task_identifier.replace("_", " ").title() assert task_definition.bpmn_process_definition.bpmn_identifier == bpmn_process_identifier + message = ( f"{base_failure_message} Expected: {expected_python_env_data}. Received: {task_model.json_data()}" ) @@ -373,6 +376,7 @@ class TestProcessInstanceProcessor(BaseTest): bpmn_process_definition = bpmn_process.bpmn_process_definition assert bpmn_process_definition is not None assert bpmn_process_definition.bpmn_identifier == "test_process_to_call" + assert bpmn_process_definition.bpmn_name == "Test Process To Call" assert processor.get_data() == fifth_data_set From 44b166fba8550c002ebd4d9c5936e05d04df53ef Mon Sep 17 00:00:00 2001 From: jasquat Date: Fri, 17 Mar 2023 10:30:29 -0400 Subject: [PATCH 047/162] updated log list view in frontend w/ burnettk --- .../routes/process_instances_controller.py | 8 ++++++-- .../integration/test_logging_service.py | 6 +++--- .../src/routes/ProcessInstanceLogList.tsx | 17 ++++++++--------- 3 files changed, 17 insertions(+), 14 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index a63d8e25..9f883c0a 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -257,9 +257,13 @@ def process_instance_log_list( TaskDefinitionModel.typename.in_(["IntermediateThrowEvent"]), # type: ignore ) ) + else: + log_query = log_query.filter( + TaskModel.state.in_(["COMPLETED"]), # type: ignore + ) logs = ( - log_query.order_by(TaskModel.end_in_seconds.desc()) # type: ignore + log_query.order_by(TaskModel.end_in_seconds.desc(), TaskModel.id.desc()) # type: ignore .outerjoin(HumanTaskModel, HumanTaskModel.task_model_id == TaskModel.id) .outerjoin(UserModel, UserModel.id == HumanTaskModel.completed_by_user_id) .add_columns( @@ -269,7 +273,7 @@ def process_instance_log_list( BpmnProcessDefinitionModel.bpmn_name.label("bpmn_process_definition_name"), # type: ignore TaskDefinitionModel.bpmn_identifier.label("task_definition_identifier"), # type: ignore TaskDefinitionModel.bpmn_name.label("task_definition_name"), # type: ignore - TaskDefinitionModel.typename.label("bpmn_type"), # type: ignore + TaskDefinitionModel.typename.label("bpmn_task_type"), # type: ignore ) .paginate(page=page, per_page=per_page, error_out=False) ) diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py index f277cad5..990cc3ba 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py @@ -59,7 
+59,7 @@ class TestLoggingService(BaseTest): assert log_response.status_code == 200 assert log_response.json logs: list = log_response.json["results"] - assert len(logs) == 9 + assert len(logs) == 4 for log in logs: assert log["process_instance_id"] == process_instance.id @@ -71,7 +71,7 @@ class TestLoggingService(BaseTest): "bpmn_process_definition_name", "task_definition_identifier", "task_definition_name", - "bpmn_type", + "bpmn_task_type", ]: assert key in log.keys() @@ -129,4 +129,4 @@ class TestLoggingService(BaseTest): for log in logs: assert log["process_instance_id"] == process_instance.id - assert log["bpmn_type"] == "IntermediateThrowEvent" + assert log["bpmn_task_type"] == "IntermediateThrowEvent" diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx index 5c1803f7..05c2eb87 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx @@ -54,15 +54,16 @@ export default function ProcessInstanceLogList({ variant }: OwnProps) { const tableRow = []; const taskNameCell = ( - {row.bpmn_task_name || - (row.bpmn_task_type === 'Default Start Event' - ? 'Process Started' - : '') || - (row.bpmn_task_type === 'End Event' ? 'Process Ended' : '')} + {row.task_definition_name || + (row.bpmn_task_type === 'StartEvent' ? 'Process Started' : '') || + (row.bpmn_task_type === 'EndEvent' ? 'Process Ended' : '')} ); const bpmnProcessCell = ( - {row.bpmn_process_name || row.bpmn_process_identifier} + + {row.bpmn_process_definition_name || + row.bpmn_process_definition_identifier} + ); if (isDetailedView) { tableRow.push( @@ -84,7 +85,6 @@ export default function ProcessInstanceLogList({ variant }: OwnProps) { tableRow.push( <> {row.bpmn_task_type} - {row.message} {row.username || ( system @@ -99,7 +99,7 @@ export default function ProcessInstanceLogList({ variant }: OwnProps) { data-qa="process-instance-show-link" to={`${processInstanceShowPageBaseUrl}/${row.process_instance_id}/${row.spiff_step}`} > - {convertSecondsToFormattedDateTime(row.timestamp)} + {convertSecondsToFormattedDateTime(row.end_in_seconds)} ); @@ -132,7 +132,6 @@ export default function ProcessInstanceLogList({ variant }: OwnProps) { tableHeaders.push( <> Task Type - Message User ); From 6d847a10e0468947fce4525d74e80e7efb7a5367 Mon Sep 17 00:00:00 2001 From: jbirddog <100367399+jbirddog@users.noreply.github.com> Date: Fri, 17 Mar 2023 11:55:11 -0400 Subject: [PATCH 048/162] UAT bug fix: suspended/terminated instances automagically resume (#186) --- .../routes/process_instances_controller.py | 38 +++++++++++++++++-- .../services/process_instance_processor.py | 18 ++++----- .../services/process_instance_service.py | 8 ++++ .../integration/test_process_api.py | 4 +- 4 files changed, 53 insertions(+), 15 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index 489b710c..a2972793 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -170,7 +170,17 @@ def process_instance_terminate( """Process_instance_run.""" process_instance = _find_process_instance_by_id_or_raise(process_instance_id) processor = ProcessInstanceProcessor(process_instance) - processor.terminate() + + try: + 
processor.lock_process_instance("Web") + processor.terminate() + except (ProcessInstanceIsNotEnqueuedError, ProcessInstanceIsAlreadyLockedError) as e: + ErrorHandlingService().handle_error(processor, e) + raise e + finally: + if ProcessInstanceLockService.has_lock(process_instance.id): + processor.unlock_process_instance("Web") + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") @@ -180,7 +190,18 @@ def process_instance_suspend( ) -> flask.wrappers.Response: """Process_instance_suspend.""" process_instance = _find_process_instance_by_id_or_raise(process_instance_id) - ProcessInstanceProcessor.suspend(process_instance) + processor = ProcessInstanceProcessor(process_instance) + + try: + processor.lock_process_instance("Web") + processor.suspend() + except (ProcessInstanceIsNotEnqueuedError, ProcessInstanceIsAlreadyLockedError) as e: + ErrorHandlingService().handle_error(processor, e) + raise e + finally: + if ProcessInstanceLockService.has_lock(process_instance.id): + processor.unlock_process_instance("Web") + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") @@ -190,7 +211,18 @@ def process_instance_resume( ) -> flask.wrappers.Response: """Process_instance_resume.""" process_instance = _find_process_instance_by_id_or_raise(process_instance_id) - ProcessInstanceProcessor.resume(process_instance) + processor = ProcessInstanceProcessor(process_instance) + + try: + processor.lock_process_instance("Web") + processor.resume() + except (ProcessInstanceIsNotEnqueuedError, ProcessInstanceIsAlreadyLockedError) as e: + ErrorHandlingService().handle_error(processor, e) + raise e + finally: + if ProcessInstanceLockService.has_lock(process_instance.id): + processor.unlock_process_instance("Web") + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 5e771c12..fda3d395 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -1249,7 +1249,7 @@ class ProcessInstanceProcessor: self.add_step() self.save() # Saving the workflow seems to reset the status - self.suspend(self.process_instance_model) + self.suspend() def reset_process(self, spiff_step: int) -> None: """Reset a process to an earlier state.""" @@ -1292,7 +1292,7 @@ class ProcessInstanceProcessor: db.session.delete(row) self.save() - self.suspend(self.process_instance_model) + self.suspend() @staticmethod def get_parser() -> MyCustomParser: @@ -1900,16 +1900,14 @@ class ProcessInstanceProcessor: db.session.add(self.process_instance_model) db.session.commit() - @classmethod - def suspend(cls, process_instance: ProcessInstanceModel) -> None: + def suspend(self) -> None: """Suspend.""" - process_instance.status = ProcessInstanceStatus.suspended.value - db.session.add(process_instance) + self.process_instance_model.status = ProcessInstanceStatus.suspended.value + db.session.add(self.process_instance_model) db.session.commit() - @classmethod - def resume(cls, process_instance: ProcessInstanceModel) -> None: + def resume(self) -> None: """Resume.""" - process_instance.status = ProcessInstanceStatus.waiting.value - db.session.add(process_instance) + self.process_instance_model.status = ProcessInstanceStatus.waiting.value + 
db.session.add(self.process_instance_model) db.session.commit() diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py index 5e149965..d7ea5613 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py @@ -25,6 +25,7 @@ from spiffworkflow_backend.models.process_instance_file_data import ( from spiffworkflow_backend.models.process_model import ProcessModelInfo from spiffworkflow_backend.models.task import Task from spiffworkflow_backend.models.user import UserModel +from spiffworkflow_backend.services.assertion_service import safe_assertion from spiffworkflow_backend.services.authorization_service import AuthorizationService from spiffworkflow_backend.services.git_service import GitCommandError from spiffworkflow_backend.services.git_service import GitService @@ -95,6 +96,13 @@ class ProcessInstanceService: ) process_instance_lock_prefix = "Background" for process_instance in records: + with safe_assertion(process_instance.status == status_value) as false_assumption: + if false_assumption: + raise AssertionError( + f"Queue assumed process instance {process_instance.id} has status of {status_value} " + f"when it really is {process_instance.status}" + ) + locked = False processor = None try: diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index 086841c0..f7f644dd 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -1418,7 +1418,7 @@ class TestProcessApi(BaseTest): ) processor.save() - processor.suspend(process_instance) + processor.suspend() payload["description"] = "Message To Suspended" response = client.post( f"/v1.0/messages/{message_model_identifier}", @@ -1430,7 +1430,7 @@ class TestProcessApi(BaseTest): assert response.json assert response.json["error_code"] == "message_not_accepted" - processor.resume(process_instance) + processor.resume() payload["description"] = "Message To Resumed" response = client.post( f"/v1.0/messages/{message_model_identifier}", From 5e239f0e7fa868d4c3469b11fe25af278cae5d98 Mon Sep 17 00:00:00 2001 From: jasquat Date: Fri, 17 Mar 2023 13:20:06 -0400 Subject: [PATCH 049/162] added process instance event table --- spiffworkflow-backend/migrations/env.py | 2 + .../{8dce75b80bfd_.py => 05153ab6a6b8_.py} | 26 ++++++- .../models/process_instance_event.py | 41 +++++++++++ .../routes/process_instances_controller.py | 45 +++--------- .../services/process_instance_processor.py | 22 ++++++ .../services/workflow_execution_service.py | 22 +++++- .../integration/test_logging_service.py | 4 +- .../src/routes/ProcessInstanceLogList.tsx | 18 ++++- .../src/routes/ProcessInstanceShow.tsx | 71 +++++++++++++++++++ 9 files changed, 203 insertions(+), 48 deletions(-) rename spiffworkflow-backend/migrations/versions/{8dce75b80bfd_.py => 05153ab6a6b8_.py} (94%) create mode 100644 spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_event.py diff --git a/spiffworkflow-backend/migrations/env.py b/spiffworkflow-backend/migrations/env.py index 630e381a..68feded2 100644 --- a/spiffworkflow-backend/migrations/env.py +++ 
b/spiffworkflow-backend/migrations/env.py @@ -1,3 +1,5 @@ +from __future__ import with_statement + import logging from logging.config import fileConfig diff --git a/spiffworkflow-backend/migrations/versions/8dce75b80bfd_.py b/spiffworkflow-backend/migrations/versions/05153ab6a6b8_.py similarity index 94% rename from spiffworkflow-backend/migrations/versions/8dce75b80bfd_.py rename to spiffworkflow-backend/migrations/versions/05153ab6a6b8_.py index 6618a304..5ee8fda7 100644 --- a/spiffworkflow-backend/migrations/versions/8dce75b80bfd_.py +++ b/spiffworkflow-backend/migrations/versions/05153ab6a6b8_.py @@ -1,8 +1,8 @@ """empty message -Revision ID: 8dce75b80bfd +Revision ID: 05153ab6a6b8 Revises: -Create Date: 2023-03-17 09:08:24.146736 +Create Date: 2023-03-17 12:22:43.449203 """ from alembic import op @@ -10,7 +10,7 @@ import sqlalchemy as sa from sqlalchemy.dialects import mysql # revision identifiers, used by Alembic. -revision = '8dce75b80bfd' +revision = '05153ab6a6b8' down_revision = None branch_labels = None depends_on = None @@ -269,6 +269,21 @@ def upgrade(): sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), sa.PrimaryKeyConstraint('id') ) + op.create_table('process_instance_event', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('task_guid', sa.String(length=36), nullable=True), + sa.Column('process_instance_id', sa.Integer(), nullable=False), + sa.Column('event_type', sa.String(length=50), nullable=False), + sa.Column('timestamp', sa.DECIMAL(precision=17, scale=6), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_process_instance_event_event_type'), 'process_instance_event', ['event_type'], unique=False) + op.create_index(op.f('ix_process_instance_event_task_guid'), 'process_instance_event', ['task_guid'], unique=False) + op.create_index(op.f('ix_process_instance_event_timestamp'), 'process_instance_event', ['timestamp'], unique=False) + op.create_index(op.f('ix_process_instance_event_user_id'), 'process_instance_event', ['user_id'], unique=False) op.create_table('process_instance_file_data', sa.Column('id', sa.Integer(), nullable=False), sa.Column('process_instance_id', sa.Integer(), nullable=False), @@ -424,6 +439,11 @@ def downgrade(): op.drop_table('process_instance_metadata') op.drop_index(op.f('ix_process_instance_file_data_digest'), table_name='process_instance_file_data') op.drop_table('process_instance_file_data') + op.drop_index(op.f('ix_process_instance_event_user_id'), table_name='process_instance_event') + op.drop_index(op.f('ix_process_instance_event_timestamp'), table_name='process_instance_event') + op.drop_index(op.f('ix_process_instance_event_task_guid'), table_name='process_instance_event') + op.drop_index(op.f('ix_process_instance_event_event_type'), table_name='process_instance_event') + op.drop_table('process_instance_event') op.drop_table('message_instance') op.drop_index(op.f('ix_process_instance_process_model_identifier'), table_name='process_instance') op.drop_index(op.f('ix_process_instance_process_model_display_name'), table_name='process_instance') diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_event.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_event.py new file mode 100644 index 00000000..de965e9a --- /dev/null +++ 
b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_event.py @@ -0,0 +1,41 @@ +from __future__ import annotations +from spiffworkflow_backend.models.user import UserModel +from spiffworkflow_backend.helpers.spiff_enum import SpiffEnum +from typing import Any +from sqlalchemy.orm import validates + +from sqlalchemy import ForeignKey + +from spiffworkflow_backend.models.db import db +from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel + + +# event types take the form [SUBJECT]_[PAST_TENSE_VERB] since subject is not always the same. +class ProcessInstanceEventType(SpiffEnum): + process_instance_resumed = "process_instance_resumed" + process_instance_rewound_to_task = "process_instance_rewound_to_task" + process_instance_suspended = "process_instance_suspended" + process_instance_terminated = "process_instance_terminated" + task_completed = "task_completed" + task_data_edited = "task_data_edited" + task_executed_manually = "task_executed_manually" + task_failed = "task_failed" + task_skipped = "task_skipped" + + +class ProcessInstanceEventModel(SpiffworkflowBaseDBModel): + __tablename__ = "process_instance_event" + id: int = db.Column(db.Integer, primary_key=True) + + # use task guid so we can bulk insert without worrying about whether or not the task has an id yet + task_guid: str | None = db.Column(db.String(36), nullable=True, index=True) + process_instance_id: int = db.Column(ForeignKey("process_instance.id"), nullable=False) + + event_type: str = db.Column(db.String(50), nullable=False, index=True) + timestamp: float = db.Column(db.DECIMAL(17, 6), nullable=False, index=True) + + user_id = db.Column(ForeignKey(UserModel.id), nullable=True, index=True) # type: ignore + + @validates("event_type") + def validate_event_type(self, key: str, value: Any) -> Any: + return self.validate_enum_field(key, value, ProcessInstanceEventType) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index cd84f3d4..e82a1aa9 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -29,6 +29,7 @@ from spiffworkflow_backend.models.process_instance import ( ) from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema +from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventModel from spiffworkflow_backend.models.process_instance_metadata import ( ProcessInstanceMetadataModel, ) @@ -240,37 +241,11 @@ def process_instance_log_list( # to make sure the process instance exists process_instance = _find_process_instance_by_id_or_raise(process_instance_id) - # log_query = SpiffLoggingModel.query.filter(SpiffLoggingModel.process_instance_id == process_instance.id) - # if not detailed: - # log_query = log_query.filter( - # # 1. this was the previous implementation, where we only show completed tasks and skipped tasks. - # # maybe we want to iterate on this in the future (in a third tab under process instance logs?) - # # or_( - # # SpiffLoggingModel.message.in_(["State change to COMPLETED"]), # type: ignore - # # SpiffLoggingModel.message.like("Skipped task %"), # type: ignore - # # ) - # # 2. 
We included ["End Event", "Default Start Event"] along with Default Throwing Event, but feb 2023 - # # we decided to remove them, since they get really chatty when there are lots of subprocesses and call activities. - # and_( - # SpiffLoggingModel.message.in_(["State change to COMPLETED"]), # type: ignore - # SpiffLoggingModel.bpmn_task_type.in_(["Default Throwing Event"]), # type: ignore - # ) - # ) - # - # logs = ( - # log_query.order_by(SpiffLoggingModel.timestamp.desc()) # type: ignore - # .join( - # UserModel, UserModel.id == SpiffLoggingModel.current_user_id, isouter=True - # ) # isouter since if we don't have a user, we still want the log - # .add_columns( - # UserModel.username, - # ) - # .paginate(page=page, per_page=per_page, error_out=False) - # ) log_query = ( - TaskModel.query.filter_by(process_instance_id=process_instance.id) - .join(TaskDefinitionModel, TaskDefinitionModel.id == TaskModel.task_definition_id) - .join( + ProcessInstanceEventModel.query.filter_by(process_instance_id=process_instance.id) + .outerjoin(TaskModel, TaskModel.guid == ProcessInstanceEventModel.task_guid) + .outerjoin(TaskDefinitionModel, TaskDefinitionModel.id == TaskModel.task_definition_id) + .outerjoin( BpmnProcessDefinitionModel, BpmnProcessDefinitionModel.id == TaskDefinitionModel.bpmn_process_definition_id ) ) @@ -289,15 +264,11 @@ def process_instance_log_list( TaskDefinitionModel.typename.in_(["IntermediateThrowEvent"]), # type: ignore ) ) - else: - log_query = log_query.filter( - TaskModel.state.in_(["COMPLETED"]), # type: ignore - ) logs = ( - log_query.order_by(TaskModel.end_in_seconds.desc(), TaskModel.id.desc()) # type: ignore - .outerjoin(HumanTaskModel, HumanTaskModel.task_model_id == TaskModel.id) - .outerjoin(UserModel, UserModel.id == HumanTaskModel.completed_by_user_id) + log_query.order_by(ProcessInstanceEventModel.timestamp.desc(), + ProcessInstanceEventModel.id.desc()) # type: ignore + .outerjoin(UserModel, UserModel.id == ProcessInstanceEventModel.user_id) .add_columns( TaskModel.guid.label("spiff_task_guid"), # type: ignore UserModel.username, diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 7435f39e..0338aee3 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -1,5 +1,7 @@ """Process_instance_processor.""" import _strptime # type: ignore +from flask import g +from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventModel, ProcessInstanceEventType import decimal import json import logging @@ -1811,6 +1813,9 @@ class ProcessInstanceProcessor: json_data = JsonDataModel(**json_data_dict) db.session.add(json_data) + self.add_event_to_process_instance(self.process_instance_model, + ProcessInstanceEventType.task_completed.value, task_guid=task_model.guid) + # this is the thing that actually commits the db transaction (on behalf of the other updates above as well) self.save() @@ -1935,16 +1940,33 @@ class ProcessInstanceProcessor: self.save() self.process_instance_model.status = "terminated" db.session.add(self.process_instance_model) + self.add_event_to_process_instance(self.process_instance_model, + ProcessInstanceEventType.process_instance_terminated.value) db.session.commit() def suspend(self) -> None: """Suspend.""" self.process_instance_model.status = 
ProcessInstanceStatus.suspended.value db.session.add(self.process_instance_model) + self.add_event_to_process_instance(self.process_instance_model, + ProcessInstanceEventType.process_instance_suspended.value) db.session.commit() def resume(self) -> None: """Resume.""" self.process_instance_model.status = ProcessInstanceStatus.waiting.value db.session.add(self.process_instance_model) + self.add_event_to_process_instance(self.process_instance_model, + ProcessInstanceEventType.process_instance_resumed.value) db.session.commit() + + @classmethod + def add_event_to_process_instance(cls, process_instance: ProcessInstanceModel, event_type: str, task_guid: Optional[str] = None) -> None: + user_id = None + if g.user: + user_id = g.user.id + process_instance_event = ProcessInstanceEventModel( + process_instance_id=process_instance.id, event_type=event_type, timestamp=time.time(), user_id=user_id) + if task_guid: + process_instance_event.task_guid = task_guid + db.session.add(process_instance_event) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py index fe98ac80..f7320e34 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py @@ -17,6 +17,7 @@ from spiffworkflow_backend.models.message_instance_correlation import ( MessageInstanceCorrelationRuleModel, ) from spiffworkflow_backend.models.process_instance import ProcessInstanceModel +from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventModel from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel from spiffworkflow_backend.models.task import TaskModel # noqa: F401 from spiffworkflow_backend.services.assertion_service import safe_assertion @@ -63,12 +64,14 @@ class TaskModelSavingDelegate(EngineStepDelegate): self.secondary_engine_step_delegate = secondary_engine_step_delegate self.process_instance = process_instance self.bpmn_definition_to_task_definitions_mappings = bpmn_definition_to_task_definitions_mappings + self.serializer = serializer self.current_task_model: Optional[TaskModel] = None self.current_task_start_in_seconds: Optional[float] = None + self.task_models: dict[str, TaskModel] = {} self.json_data_dicts: dict[str, JsonDataDict] = {} - self.serializer = serializer + self.process_instance_events: dict[str, ProcessInstanceEventModel] = {} def will_complete_task(self, spiff_task: SpiffTask) -> None: if self._should_update_task_model(): @@ -90,9 +93,10 @@ class TaskModelSavingDelegate(EngineStepDelegate): script_engine = bpmn_process_instance.script_engine if hasattr(script_engine, "failing_spiff_task") and script_engine.failing_spiff_task is not None: failing_spiff_task = script_engine.failing_spiff_task - self._update_task_model_with_spiff_task(failing_spiff_task) + self._update_task_model_with_spiff_task(failing_spiff_task, task_failed=True) db.session.bulk_save_objects(self.task_models.values()) + db.session.bulk_save_objects(self.process_instance_events.values()) TaskService.insert_or_update_json_data_records(self.json_data_dicts) @@ -121,7 +125,7 @@ class TaskModelSavingDelegate(EngineStepDelegate): if json_data_dict is not None: self.json_data_dicts[json_data_dict["hash"]] = json_data_dict - def _update_task_model_with_spiff_task(self, spiff_task: SpiffTask) -> TaskModel: + def 
_update_task_model_with_spiff_task(self, spiff_task: SpiffTask, task_failed: bool = False) -> TaskModel: bpmn_process, task_model, new_task_models, new_json_data_dicts = ( TaskService.find_or_create_task_model_from_spiff_task( spiff_task, @@ -141,6 +145,18 @@ class TaskModelSavingDelegate(EngineStepDelegate): json_data_dict_list.append(bpmn_process_json_data) self._update_json_data_dicts_using_list(json_data_dict_list) + if task_model.state == "COMPLETED" or task_failed: + event_type = "task_completed" + if task_failed: + event_type = "task_errored" + + # FIXME: some failed tasks will currently not have either timestamp since we only hook into spiff when tasks complete + # which script tasks execute when READY. + timestamp = task_model.end_in_seconds or task_model.start_in_seconds or time.time() + process_instance_event = ProcessInstanceEventModel( + task_guid=task_model.guid, process_instance_id=self.process_instance.id, event_type=event_type, timestamp=timestamp) + self.process_instance_events[task_model.guid] = process_instance_event + return task_model diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py index 990cc3ba..f79a3295 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py @@ -64,8 +64,8 @@ class TestLoggingService(BaseTest): for log in logs: assert log["process_instance_id"] == process_instance.id for key in [ - "start_in_seconds", - "end_in_seconds", + "event_type", + "timestamp", "spiff_task_guid", "bpmn_process_definition_identifier", "bpmn_process_definition_name", diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx index 05c2eb87..365ab7f1 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx @@ -85,6 +85,7 @@ export default function ProcessInstanceLogList({ variant }: OwnProps) { tableRow.push( <> {row.bpmn_task_type} + {row.event_type} {row.username || ( system @@ -99,7 +100,7 @@ export default function ProcessInstanceLogList({ variant }: OwnProps) { data-qa="process-instance-show-link" to={`${processInstanceShowPageBaseUrl}/${row.process_instance_id}/${row.spiff_step}`} > - {convertSecondsToFormattedDateTime(row.end_in_seconds)} + {convertSecondsToFormattedDateTime(row.timestamp)} ); @@ -132,6 +133,7 @@ export default function ProcessInstanceLogList({ variant }: OwnProps) { tableHeaders.push( <> Task Type + Event User ); @@ -177,7 +179,7 @@ export default function ProcessInstanceLogList({ variant }: OwnProps) { setSearchParams(searchParams); }} > - Simple + Milestones - Detailed + Events + {/* + Suspend + Resumed + Terminated + + Skipped? + Rewind? + Execute? + Edit? + */}
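[Aside: the "Milestones" / "Events" tab labels above correspond to the detailed query parameter handled by process_instance_log_list earlier in this patch. A rough sketch of that server-side split follows; log_query is assumed to be a Flask-SQLAlchemy query, TaskModel's import path appears verbatim elsewhere in this series, and TaskDefinitionModel's is an assumption.]

from sqlalchemy import and_

from spiffworkflow_backend.models.task import TaskModel
from spiffworkflow_backend.models.task_definition import TaskDefinitionModel  # module path assumed


def apply_log_detail_filter(log_query, detailed):
    # detailed=false ("Milestones"): keep only completed intermediate throw events,
    # the named checkpoints a modeler places in the diagram.
    # detailed=true ("Events"): return every logged row unfiltered.
    if not detailed:
        return log_query.filter(
            and_(
                TaskModel.state.in_(["COMPLETED"]),
                TaskDefinitionModel.typename.in_(["IntermediateThrowEvent"]),
            )
        )
    return log_query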
diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx
index 36c06d23..652e07b0 100644
--- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx
+++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx
@@ -381,6 +381,40 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
{lastUpdatedTimeTag}
+ {/*
+ [commented-out mock markup; the surrounding JSX tags were lost in extraction. It
+ hard-codes a lifecycle list -- "Suspended at: 2023-03-17 10:12:05 (by jason)",
+ "Resumed at: 2023-03-17 10:13:05 (by jason)", "Suspended at: 2023-03-17 10:14:05 (by jason)",
+ "Terminated at: 2023-03-17 10:15:05 (by jason)" -- as a placeholder for rendering
+ the process instance events introduced in this patch]
+ */}
Process model revision:{' '}
@@ -400,6 +434,43 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
+
+ {/*
+ [a second copy of the same commented-out mock lifecycle list, added further down the page]
+ */}
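[Aside: patch 049 replaces scraping the task table with a first-class event log. A minimal sketch of writing and reading events with the model it introduces, assuming a Flask app context and the backend's Flask-SQLAlchemy session; record_event and latest_events are illustrative helper names, not functions from the codebase.]

from __future__ import annotations

import time

from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.process_instance_event import (
    ProcessInstanceEventModel,
    ProcessInstanceEventType,
)


def record_event(process_instance_id: int, event_type: str, task_guid: str | None = None) -> None:
    # Mirrors ProcessInstanceProcessor.add_event_to_process_instance in this patch:
    # events are append-only rows, timestamped at write time; the caller commits.
    db.session.add(
        ProcessInstanceEventModel(
            process_instance_id=process_instance_id,
            event_type=event_type,
            timestamp=time.time(),
            task_guid=task_guid,
        )
    )


def latest_events(process_instance_id: int, limit: int = 10) -> list:
    # Same ordering as the log-list endpoint: newest first, with id as a tiebreaker
    # for events written within the same clock reading.
    return (
        ProcessInstanceEventModel.query.filter_by(process_instance_id=process_instance_id)
        .order_by(ProcessInstanceEventModel.timestamp.desc(), ProcessInstanceEventModel.id.desc())
        .limit(limit)
        .all()
    )


# Usage, with a hypothetical instance id:
record_event(42, ProcessInstanceEventType.process_instance_suspended.value)
db.session.commit()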
From b5cfcc1fc650c758c9495c2cde9a92bd95336b23 Mon Sep 17 00:00:00 2001 From: jasquat Date: Fri, 17 Mar 2023 13:51:56 -0400 Subject: [PATCH 050/162] added event logs for skipping and manually editing a task --- spiffworkflow-backend/migrations/env.py | 2 - .../models/process_instance_event.py | 7 +- .../routes/process_api_blueprint.py | 43 +++++------ .../routes/process_instances_controller.py | 5 +- .../services/process_instance_processor.py | 35 +++++---- .../services/task_service.py | 10 +-- .../services/workflow_execution_service.py | 6 +- .../src/routes/ProcessInstanceShow.tsx | 71 ------------------- 8 files changed, 63 insertions(+), 116 deletions(-) diff --git a/spiffworkflow-backend/migrations/env.py b/spiffworkflow-backend/migrations/env.py index 68feded2..630e381a 100644 --- a/spiffworkflow-backend/migrations/env.py +++ b/spiffworkflow-backend/migrations/env.py @@ -1,5 +1,3 @@ -from __future__ import with_statement - import logging from logging.config import fileConfig diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_event.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_event.py index de965e9a..560abffd 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_event.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_event.py @@ -1,13 +1,14 @@ from __future__ import annotations -from spiffworkflow_backend.models.user import UserModel -from spiffworkflow_backend.helpers.spiff_enum import SpiffEnum + from typing import Any -from sqlalchemy.orm import validates from sqlalchemy import ForeignKey +from sqlalchemy.orm import validates +from spiffworkflow_backend.helpers.spiff_enum import SpiffEnum from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel +from spiffworkflow_backend.models.user import UserModel # event types take the form [SUBJECT]_[PAST_TENSE_VERB] since subject is not always the same. 
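[Aside: the validates("event_type") hook shown above is what keeps event_type inside the enum. A short illustration of the intended behavior; it assumes validate_enum_field (defined on SpiffworkflowBaseDBModel, not shown in this series) raises on values outside ProcessInstanceEventType, which is the only reading that makes the hook meaningful.]

from spiffworkflow_backend.models.process_instance_event import (
    ProcessInstanceEventModel,
    ProcessInstanceEventType,
)

# Accepted: the value is a member of the enum added in patch 049.
event = ProcessInstanceEventModel(
    process_instance_id=1,  # hypothetical instance id
    event_type=ProcessInstanceEventType.task_data_edited.value,
    timestamp=1679063525.0,
)

# Rejected (assumed): @validates runs on every assignment, including constructor
# kwargs, so a misspelled event type fails fast instead of landing in the table.
event.event_type = "task_exploded"  # expected to raise via validate_enum_field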
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py index 32becbc6..a07f5f49 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py @@ -17,24 +17,28 @@ from spiffworkflow_backend.exceptions.process_entity_not_found_error import ( ProcessEntityNotFoundError, ) from spiffworkflow_backend.models.db import db +from spiffworkflow_backend.models.json_data import JsonDataModel from spiffworkflow_backend.models.principal import PrincipalModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema from spiffworkflow_backend.models.process_instance import ( ProcessInstanceTaskDataCannotBeUpdatedError, ) +from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType from spiffworkflow_backend.models.process_instance_file_data import ( ProcessInstanceFileDataModel, ) from spiffworkflow_backend.models.process_model import ProcessModelInfo from spiffworkflow_backend.models.spec_reference import SpecReferenceCache from spiffworkflow_backend.models.spec_reference import SpecReferenceSchema +from spiffworkflow_backend.models.task import TaskModel # noqa: F401 from spiffworkflow_backend.services.authorization_service import AuthorizationService from spiffworkflow_backend.services.git_service import GitService from spiffworkflow_backend.services.process_instance_processor import ( ProcessInstanceProcessor, ) from spiffworkflow_backend.services.process_model_service import ProcessModelService +from spiffworkflow_backend.services.task_service import TaskService process_api_blueprint = Blueprint("process_api", __name__) @@ -180,34 +184,33 @@ def task_data_update( f" It is currently: {process_instance.status}" ) - process_instance_data = process_instance.process_instance_data - if process_instance_data is None: + task_model = TaskModel.query.filter_by(guid=task_id).first() + if task_model is None: raise ApiError( - error_code="process_instance_data_not_found", - message=f"Could not find task data related to process instance: {process_instance.id}", + error_code="update_task_data_error", + message=f"Could not find Task: {task_id} in Instance: {process_instance_id}.", ) - process_instance_data_dict = json.loads(process_instance_data.runtime_json) if "new_task_data" in body: new_task_data_str: str = body["new_task_data"] new_task_data_dict = json.loads(new_task_data_str) - if task_id in process_instance_data_dict["tasks"]: - process_instance_data_dict["tasks"][task_id]["data"] = new_task_data_dict - process_instance_data.runtime_json = json.dumps(process_instance_data_dict) - db.session.add(process_instance_data) - try: - db.session.commit() - except Exception as e: - db.session.rollback() - raise ApiError( - error_code="update_task_data_error", - message=f"Could not update the Instance. 
Original error is {e}", - ) from e - else: + json_data_dict = TaskService.update_task_data_on_task_model( + task_model, new_task_data_dict, "json_data_hash" + ) + if json_data_dict is not None: + json_data = JsonDataModel(**json_data_dict) + db.session.add(json_data) + ProcessInstanceProcessor.add_event_to_process_instance( + process_instance, ProcessInstanceEventType.task_data_edited.value, task_guid=task_id + ) + try: + db.session.commit() + except Exception as e: + db.session.rollback() raise ApiError( error_code="update_task_data_error", - message=f"Could not find Task: {task_id} in Instance: {process_instance_id}.", - ) + message=f"Could not update the Instance. Original error is {e}", + ) from e else: raise ApiError( error_code="update_task_data_error", diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index e82a1aa9..23d71442 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -266,8 +266,9 @@ def process_instance_log_list( ) logs = ( - log_query.order_by(ProcessInstanceEventModel.timestamp.desc(), - ProcessInstanceEventModel.id.desc()) # type: ignore + log_query.order_by( + ProcessInstanceEventModel.timestamp.desc(), ProcessInstanceEventModel.id.desc() # type: ignore + ) .outerjoin(UserModel, UserModel.id == ProcessInstanceEventModel.user_id) .add_columns( TaskModel.guid.label("spiff_task_guid"), # type: ignore diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 0338aee3..ecc3c9f8 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -1,7 +1,5 @@ """Process_instance_processor.""" import _strptime # type: ignore -from flask import g -from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventModel, ProcessInstanceEventType import decimal import json import logging @@ -25,6 +23,7 @@ from uuid import UUID import dateparser import pytz from flask import current_app +from flask import g from lxml import etree # type: ignore from lxml.etree import XMLSyntaxError # type: ignore from RestrictedPython import safe_globals # type: ignore @@ -75,6 +74,8 @@ from spiffworkflow_backend.models.message_instance_correlation import ( ) from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus +from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventModel +from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType from spiffworkflow_backend.models.process_instance_metadata import ( ProcessInstanceMetadataModel, ) @@ -1240,6 +1241,7 @@ class ProcessInstanceProcessor: def manual_complete_task(self, task_id: str, execute: bool) -> None: """Mark the task complete optionally executing it.""" spiff_task = self.bpmn_process_instance.get_task(UUID(task_id)) + event_type = ProcessInstanceEventType.task_skipped.value if execute: current_app.logger.info( f"Manually executing Task {spiff_task.task_spec.name} of process" @@ -1255,6 +1257,7 @@ class ProcessInstanceProcessor: 
break else: spiff_task.complete() + event_type = ProcessInstanceEventType.task_executed_manually.value else: spiff_logger = logging.getLogger("spiff") spiff_logger.info(f"Skipped task {spiff_task.task_spec.name}", extra=spiff_task.log_info()) @@ -1275,6 +1278,7 @@ class ProcessInstanceProcessor: self.increment_spiff_step() self.add_step() + self.add_event_to_process_instance(self.process_instance_model, event_type, task_guid=task_id) self.save() # Saving the workflow seems to reset the status self.suspend() @@ -1813,8 +1817,9 @@ class ProcessInstanceProcessor: json_data = JsonDataModel(**json_data_dict) db.session.add(json_data) - self.add_event_to_process_instance(self.process_instance_model, - ProcessInstanceEventType.task_completed.value, task_guid=task_model.guid) + self.add_event_to_process_instance( + self.process_instance_model, ProcessInstanceEventType.task_completed.value, task_guid=task_model.guid + ) # this is the thing that actually commits the db transaction (on behalf of the other updates above as well) self.save() @@ -1940,33 +1945,39 @@ class ProcessInstanceProcessor: self.save() self.process_instance_model.status = "terminated" db.session.add(self.process_instance_model) - self.add_event_to_process_instance(self.process_instance_model, - ProcessInstanceEventType.process_instance_terminated.value) + self.add_event_to_process_instance( + self.process_instance_model, ProcessInstanceEventType.process_instance_terminated.value + ) db.session.commit() def suspend(self) -> None: """Suspend.""" self.process_instance_model.status = ProcessInstanceStatus.suspended.value db.session.add(self.process_instance_model) - self.add_event_to_process_instance(self.process_instance_model, - ProcessInstanceEventType.process_instance_suspended.value) + self.add_event_to_process_instance( + self.process_instance_model, ProcessInstanceEventType.process_instance_suspended.value + ) db.session.commit() def resume(self) -> None: """Resume.""" self.process_instance_model.status = ProcessInstanceStatus.waiting.value db.session.add(self.process_instance_model) - self.add_event_to_process_instance(self.process_instance_model, - ProcessInstanceEventType.process_instance_resumed.value) + self.add_event_to_process_instance( + self.process_instance_model, ProcessInstanceEventType.process_instance_resumed.value + ) db.session.commit() @classmethod - def add_event_to_process_instance(cls, process_instance: ProcessInstanceModel, event_type: str, task_guid: Optional[str] = None) -> None: + def add_event_to_process_instance( + cls, process_instance: ProcessInstanceModel, event_type: str, task_guid: Optional[str] = None + ) -> None: user_id = None if g.user: user_id = g.user.id process_instance_event = ProcessInstanceEventModel( - process_instance_id=process_instance.id, event_type=event_type, timestamp=time.time(), user_id=user_id) + process_instance_id=process_instance.id, event_type=event_type, timestamp=time.time(), user_id=user_id + ) if task_guid: process_instance_event.task_guid = task_guid db.session.add(process_instance_event) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index 5e7bf88c..5a03f387 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -60,8 +60,8 @@ class TaskService: python_env_data_dict = cls._get_python_env_data_dict_from_spiff_task(spiff_task, serializer) 
task_model.properties_json = new_properties_json task_model.state = TaskStateNames[new_properties_json["state"]] - json_data_dict = cls._update_task_data_on_task_model(task_model, spiff_task_data, "json_data_hash") - python_env_dict = cls._update_task_data_on_task_model(task_model, python_env_data_dict, "python_env_data_hash") + json_data_dict = cls.update_task_data_on_task_model(task_model, spiff_task_data, "json_data_hash") + python_env_dict = cls.update_task_data_on_task_model(task_model, python_env_data_dict, "python_env_data_hash") return [json_data_dict, python_env_dict] @classmethod @@ -246,14 +246,14 @@ class TaskService: task_model.properties_json = task_properties new_task_models[task_model.guid] = task_model - json_data_dict = TaskService._update_task_data_on_task_model( + json_data_dict = TaskService.update_task_data_on_task_model( task_model, task_data_dict, "json_data_hash" ) if json_data_dict is not None: new_json_data_dicts[json_data_dict["hash"]] = json_data_dict python_env_data_dict = cls._get_python_env_data_dict_from_spiff_task(spiff_task, serializer) - python_env_dict = TaskService._update_task_data_on_task_model( + python_env_dict = TaskService.update_task_data_on_task_model( task_model, python_env_data_dict, "python_env_data_hash" ) if python_env_dict is not None: @@ -274,7 +274,7 @@ class TaskService: return json_data_dict @classmethod - def _update_task_data_on_task_model( + def update_task_data_on_task_model( cls, task_model: TaskModel, task_data_dict: dict, task_model_data_column: str ) -> Optional[JsonDataDict]: task_data_json = json.dumps(task_data_dict, sort_keys=True) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py index f7320e34..0f2a6d0d 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py @@ -154,7 +154,11 @@ class TaskModelSavingDelegate(EngineStepDelegate): # which script tasks execute when READY. timestamp = task_model.end_in_seconds or task_model.start_in_seconds or time.time() process_instance_event = ProcessInstanceEventModel( - task_guid=task_model.guid, process_instance_id=self.process_instance.id, event_type=event_type, timestamp=timestamp) + task_guid=task_model.guid, + process_instance_id=self.process_instance.id, + event_type=event_type, + timestamp=timestamp, + ) self.process_instance_events[task_model.guid] = process_instance_event return task_model diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx index 652e07b0..36c06d23 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx @@ -381,40 +381,6 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { {lastUpdatedTimeTag} - {/* - - - Suspended at:{' '} - - - 2023-03-17 10:12:05 (by jason) - - - - - Resumed at:{' '} - - - 2023-03-17 10:13:05 (by jason) - - - - - Suspended at:{' '} - - - 2023-03-17 10:14:05 (by jason) - - - - - Terminated at:{' '} - - - 2023-03-17 10:15:05 (by jason) - - - */} Process model revision:{' '} @@ -434,43 +400,6 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { - - {/* -
- [removal of the commented-out mock lifecycle list added in patch 049: the
- "Suspended at / Resumed at / Terminated at ... (by jason)" placeholder entries,
- whose surrounding JSX tags were lost in extraction]
- */}
Process model revision:{' '}
@@ -434,43 +400,6 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
-
- {/*
- [removal of the second copy of the same commented-out mock list]
- */}
-
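[Aside: the task-data edits in patch 050 all flow through the hash-addressed json_data storage (update_task_data_on_task_model, made public above). A self-contained sketch of that convention, inferred from the sha256-of-sorted-JSON hashing shown for process definitions in patch 046; the "data" key mirrors how JsonDataModel(**json_data_dict) is constructed, but the exact JsonDataDict shape is an assumption.]

import json
from hashlib import sha256


def json_data_dict_for(task_data: dict) -> dict:
    # Canonical JSON (sorted keys) so equal payloads serialize identically,
    # then the sha256 hex digest becomes the storage key.
    data_json = json.dumps(task_data, sort_keys=True)
    return {"hash": sha256(data_json.encode("utf8")).hexdigest(), "data": task_data}


# Deduplication falls out of the scheme: identical payloads share one json_data row,
# and task rows carry only the hash (json_data_hash / python_env_data_hash).
a = json_data_dict_for({"name": "HEY", "department": "eng"})
b = json_data_dict_for({"department": "eng", "name": "HEY"})
assert a["hash"] == b["hash"]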
From 6abcf4cf71eae9a228d154421ed768cb27ae14a4 Mon Sep 17 00:00:00 2001 From: jasquat Date: Fri, 17 Mar 2023 14:03:21 -0400 Subject: [PATCH 051/162] fixed tests --- .../models/process_instance.py | 1 + .../services/process_instance_processor.py | 14 ++++++++++---- .../services/workflow_execution_service.py | 5 +++-- 3 files changed, 14 insertions(+), 6 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py index 6f1ec1b6..20492ce2 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py @@ -67,6 +67,7 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel): bpmn_process_id: int | None = db.Column(ForeignKey(BpmnProcessModel.id), nullable=True) # type: ignore bpmn_process = relationship(BpmnProcessModel, cascade="delete") tasks = relationship("TaskModel", cascade="delete") # type: ignore + process_instance_events = relationship("ProcessInstanceEventModel", cascade="delete") # type: ignore spiff_serializer_version = db.Column(db.String(50), nullable=True) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index ecc3c9f8..83500ca8 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -1818,7 +1818,10 @@ class ProcessInstanceProcessor: db.session.add(json_data) self.add_event_to_process_instance( - self.process_instance_model, ProcessInstanceEventType.task_completed.value, task_guid=task_model.guid + self.process_instance_model, + ProcessInstanceEventType.task_completed.value, + task_guid=task_model.guid, + user_id=user.id, ) # this is the thing that actually commits the db transaction (on behalf of the other updates above as well) @@ -1970,10 +1973,13 @@ class ProcessInstanceProcessor: @classmethod def add_event_to_process_instance( - cls, process_instance: ProcessInstanceModel, event_type: str, task_guid: Optional[str] = None + cls, + process_instance: ProcessInstanceModel, + event_type: str, + task_guid: Optional[str] = None, + user_id: Optional[int] = None, ) -> None: - user_id = None - if g.user: + if user_id is None and hasattr(g, "user") and g.user: user_id = g.user.id process_instance_event = ProcessInstanceEventModel( process_instance_id=process_instance.id, event_type=event_type, timestamp=time.time(), user_id=user_id diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py index 0f2a6d0d..4d44308b 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py @@ -18,6 +18,7 @@ from spiffworkflow_backend.models.message_instance_correlation import ( ) from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventModel +from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel from spiffworkflow_backend.models.task 
import TaskModel # noqa: F401 from spiffworkflow_backend.services.assertion_service import safe_assertion @@ -146,9 +147,9 @@ class TaskModelSavingDelegate(EngineStepDelegate): self._update_json_data_dicts_using_list(json_data_dict_list) if task_model.state == "COMPLETED" or task_failed: - event_type = "task_completed" + event_type = ProcessInstanceEventType.task_completed.value if task_failed: - event_type = "task_errored" + event_type = ProcessInstanceEventType.task_failed.value # FIXME: some failed tasks will currently not have either timestamp since we only hook into spiff when tasks complete # which script tasks execute when READY. From a565b96cba8478bbd6e629afc9ba58ff701a215c Mon Sep 17 00:00:00 2001 From: jasquat Date: Fri, 17 Mar 2023 15:35:27 -0400 Subject: [PATCH 052/162] tasks can be skipped and manually executed w/ burnettk --- .../services/process_instance_processor.py | 75 +++++++++++++------ .../integration/test_process_api.py | 12 +++ 2 files changed, 64 insertions(+), 23 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 83500ca8..ea59c414 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -1066,7 +1066,22 @@ class ProcessInstanceProcessor: db.session.add(bpmn_process_definition_relationship) return bpmn_process_definition - def _add_bpmn_process_definitions(self, bpmn_spec_dict: dict) -> None: + def _add_bpmn_process_definitions(self) -> None: + """Adds serialized_bpmn_definition records to the db session. + + Expects the calling method to commit it. + """ + if self.process_instance_model.bpmn_process_definition_id is not None: + return None + + # we may have to already process bpmn_defintions if we ever care about the Root task again + bpmn_dict = self.serialize() + bpmn_dict_keys = ("spec", "subprocess_specs", "serializer_version") + bpmn_spec_dict = {} + for bpmn_key in bpmn_dict.keys(): + if bpmn_key in bpmn_dict_keys: + bpmn_spec_dict[bpmn_key] = bpmn_dict[bpmn_key] + # store only if mappings is currently empty. this also would mean this is a new instance that has never saved before store_bpmn_definition_mappings = not self.bpmn_definition_to_task_definitions_mappings bpmn_process_definition_parent = self._store_bpmn_process_definition( @@ -1081,27 +1096,6 @@ class ProcessInstanceProcessor: ) self.process_instance_model.bpmn_process_definition = bpmn_process_definition_parent - def _add_bpmn_process_defintions(self) -> None: - """Adds serialized_bpmn_definition records to the db session. - - Expects the calling method to commit it. 
- """ - if self.process_instance_model.bpmn_process_definition_id is not None: - return None - - # we may have to already process bpmn_defintions if we ever care about the Root task again - bpmn_dict = self.serialize() - bpmn_dict_keys = ("spec", "subprocess_specs", "serializer_version") - process_instance_data_dict = {} - bpmn_spec_dict = {} - for bpmn_key in bpmn_dict.keys(): - if bpmn_key in bpmn_dict_keys: - bpmn_spec_dict[bpmn_key] = bpmn_dict[bpmn_key] - else: - process_instance_data_dict[bpmn_key] = bpmn_dict[bpmn_key] - - self._add_bpmn_process_definitions(bpmn_spec_dict) - def save(self) -> None: """Saves the current state of this processor to the database.""" self.process_instance_model.spiff_serializer_version = self.SERIALIZER_VERSION @@ -1240,6 +1234,7 @@ class ProcessInstanceProcessor: def manual_complete_task(self, task_id: str, execute: bool) -> None: """Mark the task complete optionally executing it.""" + spiff_tasks_updated = {} spiff_task = self.bpmn_process_instance.get_task(UUID(task_id)) event_type = ProcessInstanceEventType.task_skipped.value if execute: @@ -1253,10 +1248,14 @@ class ProcessInstanceProcessor: # We have to get to the actual start event for task in self.bpmn_process_instance.get_tasks(workflow=subprocess): task.complete() + spiff_tasks_updated[task.id] = task if isinstance(task.task_spec, StartEvent): break else: spiff_task.complete() + spiff_tasks_updated[spiff_task.id] = spiff_task + for child in spiff_task.children: + spiff_tasks_updated[child.id] = child event_type = ProcessInstanceEventType.task_executed_manually.value else: spiff_logger = logging.getLogger("spiff") @@ -1264,20 +1263,50 @@ class ProcessInstanceProcessor: spiff_task._set_state(TaskState.COMPLETED) for child in spiff_task.children: child.task_spec._update(child) + spiff_tasks_updated[child.id] = child spiff_task.workflow.last_task = spiff_task + spiff_tasks_updated[spiff_task.id] = spiff_task if isinstance(spiff_task.task_spec, EndEvent): for task in self.bpmn_process_instance.get_tasks(TaskState.DEFINITE_MASK, workflow=spiff_task.workflow): task.complete() + spiff_tasks_updated[task.id] = task # A subworkflow task will become ready when its workflow is complete. Engine steps would normally # then complete it, but we have to do it ourselves here. 
for task in self.bpmn_process_instance.get_tasks(TaskState.READY): if isinstance(task.task_spec, SubWorkflowTask): task.complete() + spiff_tasks_updated[task.id] = task self.increment_spiff_step() self.add_step() + + for updated_spiff_task in spiff_tasks_updated.values(): + bpmn_process, task_model, new_task_models, new_json_data_dicts = ( + TaskService.find_or_create_task_model_from_spiff_task( + updated_spiff_task, + self.process_instance_model, + self._serializer, + bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, + ) + ) + bpmn_process_to_use = bpmn_process or task_model.bpmn_process + bpmn_process_json_data = TaskService.update_task_data_on_bpmn_process( + bpmn_process_to_use, updated_spiff_task.workflow.data + ) + db.session.add(bpmn_process_to_use) + json_data_dict_list = TaskService.update_task_model(task_model, updated_spiff_task, self._serializer) + for json_data_dict in json_data_dict_list: + if json_data_dict is not None: + new_json_data_dicts[json_data_dict["hash"]] = json_data_dict + if bpmn_process_json_data is not None: + new_json_data_dicts[bpmn_process_json_data["hash"]] = bpmn_process_json_data + + new_task_models[task_model.guid] = task_model + db.session.bulk_save_objects(new_task_models.values()) + TaskService.insert_or_update_json_data_records(new_json_data_dicts) + self.add_event_to_process_instance(self.process_instance_model, event_type, task_guid=task_id) self.save() # Saving the workflow seems to reset the status @@ -1582,7 +1611,7 @@ class ProcessInstanceProcessor: self._script_engine.environment.revise_state_with_task_data(task) return self.spiff_step_details_mapping(task, start, end) - self._add_bpmn_process_defintions() + self._add_bpmn_process_definitions() step_delegate = StepDetailLoggingDelegate(self.increment_spiff_step, spiff_step_details_mapping_builder) task_model_delegate = TaskModelSavingDelegate( diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index 858f2bcb..600bcb66 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -31,6 +31,7 @@ from spiffworkflow_backend.models.process_instance_report import ( from spiffworkflow_backend.models.process_model import NotificationType from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema from spiffworkflow_backend.models.spec_reference import SpecReferenceCache +from spiffworkflow_backend.models.task import TaskModel # noqa: F401 from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.services.authorization_service import AuthorizationService from spiffworkflow_backend.services.file_system_service import FileSystemService @@ -2694,8 +2695,19 @@ class TestProcessApi(BaseTest): f"/v1.0/task-complete/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/{task['id']}", headers=self.logged_in_headers(with_super_admin_user), content_type="application/json", + data=json.dumps({"execute": False}), ) assert response.json["status"] == "suspended" + task_model = TaskModel.query.filter_by(guid=task["id"]).first() + assert task_model is not None + assert task_model.state == "COMPLETED" + + response = client.get( + 
f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/task-info", + headers=self.logged_in_headers(with_super_admin_user), + ) + assert response.status_code == 200 + assert len(response.json) == 1 def setup_initial_groups_for_move_tests(self, client: FlaskClient, with_super_admin_user: UserModel) -> None: """Setup_initial_groups_for_move_tests.""" From 714a2aa2c1ee0f5fafb434be534a744c9eb9ed90 Mon Sep 17 00:00:00 2001 From: jasquat Date: Fri, 17 Mar 2023 15:49:16 -0400 Subject: [PATCH 053/162] removed spiff_logging w/ burnettk --- .../load_database_models.py | 1 - .../models/spiff_logging.py | 25 ------ .../routes/process_instances_controller.py | 10 --- .../services/logging_service.py | 80 ------------------- .../unit/test_process_instance_service.py | 33 -------- .../unit/test_spiff_logging.py | 42 ---------- 6 files changed, 191 deletions(-) delete mode 100644 spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_logging.py delete mode 100644 spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_spiff_logging.py diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/load_database_models.py b/spiffworkflow-backend/src/spiffworkflow_backend/load_database_models.py index 4b547158..52e0c573 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/load_database_models.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/load_database_models.py @@ -41,7 +41,6 @@ from spiffworkflow_backend.models.process_instance_report import ( ) # noqa: F401 from spiffworkflow_backend.models.refresh_token import RefreshTokenModel # noqa: F401 from spiffworkflow_backend.models.secret_model import SecretModel # noqa: F401 -from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel # noqa: F401 from spiffworkflow_backend.models.spiff_step_details import ( SpiffStepDetailsModel, ) # noqa: F401 diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_logging.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_logging.py deleted file mode 100644 index 854982b3..00000000 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_logging.py +++ /dev/null @@ -1,25 +0,0 @@ -"""Spiff_logging.""" -from dataclasses import dataclass -from typing import Optional - -from spiffworkflow_backend.models.db import db -from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel - - -@dataclass -class SpiffLoggingModel(SpiffworkflowBaseDBModel): - """SpiffLoggingModel.""" - - __tablename__ = "spiff_logging" - id: int = db.Column(db.Integer, primary_key=True) - process_instance_id: int = db.Column(db.Integer, nullable=False) - bpmn_process_identifier: str = db.Column(db.String(255), nullable=False) - bpmn_process_name: Optional[str] = db.Column(db.String(255), nullable=True) - bpmn_task_identifier: str = db.Column(db.String(255), nullable=False) - bpmn_task_name: str = db.Column(db.String(255), nullable=True) - bpmn_task_type: str = db.Column(db.String(255), nullable=True) - spiff_task_guid: str = db.Column(db.String(50), nullable=False) - timestamp: float = db.Column(db.DECIMAL(17, 6), nullable=False) - message: Optional[str] = db.Column(db.String(255), nullable=True) - current_user_id: int = db.Column(db.Integer, nullable=True) - spiff_step: int = db.Column(db.Integer, nullable=False) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index 
23d71442..93e78389 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -42,7 +42,6 @@ from spiffworkflow_backend.models.process_instance_report import ( from spiffworkflow_backend.models.process_model import ProcessModelInfo from spiffworkflow_backend.models.spec_reference import SpecReferenceCache from spiffworkflow_backend.models.spec_reference import SpecReferenceNotFoundError -from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel from spiffworkflow_backend.models.task import Task from spiffworkflow_backend.models.task import TaskModel @@ -251,14 +250,6 @@ def process_instance_log_list( ) if not detailed: log_query = log_query.filter( - # 1. this was the previous implementation, where we only show completed tasks and skipped tasks. - # maybe we want to iterate on this in the future (in a third tab under process instance logs?) - # or_( - # SpiffLoggingModel.message.in_(["State change to COMPLETED"]), # type: ignore - # SpiffLoggingModel.message.like("Skipped task %"), # type: ignore - # ) - # 2. We included ["End Event", "Default Start Event"] along with Default Throwing Event, but feb 2023 - # we decided to remove them, since they get really chatty when there are lots of subprocesses and call activities. and_( TaskModel.state.in_(["COMPLETED"]), # type: ignore TaskDefinitionModel.typename.in_(["IntermediateThrowEvent"]), # type: ignore @@ -458,7 +449,6 @@ def process_instance_delete( # (Pdb) db.session.delete # > - db.session.query(SpiffLoggingModel).filter_by(process_instance_id=process_instance.id).delete() db.session.query(SpiffStepDetailsModel).filter_by(process_instance_id=process_instance.id).delete() db.session.query(ProcessInstanceQueueModel).filter_by(process_instance_id=process_instance.id).delete() db.session.delete(process_instance) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py index e2f58e29..94f3a67f 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py @@ -9,10 +9,6 @@ from typing import Optional from flask import g from flask.app import Flask -from spiffworkflow_backend.models.db import db -from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel -from spiffworkflow_backend.models.task import Task - # flask logging formats: # from: https://www.askpython.com/python-modules/flask/flask-logging @@ -181,79 +177,3 @@ def setup_logger(app: Flask) -> None: for the_handler in the_logger.handlers: the_handler.setFormatter(log_formatter) the_handler.setLevel(log_level) - - spiff_logger = logging.getLogger("spiff") - spiff_logger.setLevel(spiff_log_level) - spiff_formatter = logging.Formatter( - "%(asctime)s | %(levelname)s | %(message)s | %(action)s | %(task_type)s |" - " %(process)s | %(processName)s | %(process_instance_id)s" - ) - - # if you add a handler to spiff, it will be used/inherited by spiff.metrics - # if you add a filter to the spiff logger directly (and not the handler), it will NOT be inherited by spiff.metrics - # so put filters on handlers. 
- db_handler = DBHandler() - db_handler.setLevel(spiff_log_level) - db_handler.setFormatter(spiff_formatter) - db_handler.addFilter(SpiffFilter(app)) - spiff_logger.addHandler(db_handler) - - -# https://9to5answer.com/python-logging-to-database -class DBHandler(logging.Handler): - """DBHandler.""" - - def __init__(self) -> None: - """__init__.""" - self.logs: list[dict] = [] - super().__init__() - - def bulk_insert_logs(self) -> None: - """Bulk_insert_logs.""" - db.session.bulk_insert_mappings(SpiffLoggingModel, self.logs) - db.session.commit() - self.logs = [] - - def emit(self, record: logging.LogRecord) -> None: - """Emit.""" - # if we do not have a process instance id then do not log and assume we are running a script unit test - # that initializes a BpmnWorkflow without a process instance - if record and record.process_instance_id: # type: ignore - bpmn_process_identifier = record.workflow_spec # type: ignore - bpmn_process_name = record.workflow_name # type: ignore - spiff_task_guid = str(record.task_id) # type: ignore - bpmn_task_identifier = str(record.task_spec) # type: ignore - bpmn_task_name = record.task_name if hasattr(record, "task_name") else None # type: ignore - bpmn_task_type = record.task_type if hasattr(record, "task_type") else None # type: ignore - timestamp = record.created - message = record.msg if hasattr(record, "msg") else None - - current_user_id = None - if bpmn_task_type in Task.HUMAN_TASK_TYPES and hasattr(record, "current_user_id"): - current_user_id = record.current_user_id # type: ignore - - spiff_step = ( - record.spiff_step # type: ignore - if hasattr(record, "spiff_step") and record.spiff_step is not None # type: ignore - else 1 - ) - self.logs.append( - { - "process_instance_id": record.process_instance_id, # type: ignore - "bpmn_process_identifier": bpmn_process_identifier, - "bpmn_process_name": bpmn_process_name, - "spiff_task_guid": spiff_task_guid, - "bpmn_task_name": bpmn_task_name, - "bpmn_task_identifier": bpmn_task_identifier, - "bpmn_task_type": bpmn_task_type, - "message": message, - "timestamp": timestamp, - "current_user_id": current_user_id, - "spiff_step": spiff_step, - } - ) - # so at some point we are going to insert logs. - # we don't want to insert on every log, so we will insert every 100 logs, which is just about as fast as inserting - # on every 1,000 logs. if we get deadlocks in the database, this can be changed to 1 in order to insert on every log. 
- if len(self.logs) >= 100: - self.bulk_insert_logs() diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_service.py index 83f19a69..436810cc 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_service.py @@ -3,16 +3,11 @@ from typing import Optional from flask.app import Flask from tests.spiffworkflow_backend.helpers.base_test import BaseTest -from tests.spiffworkflow_backend.helpers.test_data import load_test_spec from spiffworkflow_backend.models.process_instance_file_data import ( ProcessInstanceFileDataModel, ) -from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel from spiffworkflow_backend.models.user import UserModel -from spiffworkflow_backend.services.process_instance_processor import ( - ProcessInstanceProcessor, -) from spiffworkflow_backend.services.process_instance_service import ( ProcessInstanceService, ) @@ -194,31 +189,3 @@ class TestProcessInstanceService(BaseTest): ], "not_a_file3": "just a value3", } - - def test_does_not_log_set_data_when_calling_engine_steps_on_waiting_call_activity( - self, - app: Flask, - with_db_and_bpmn_file_cleanup: None, - with_super_admin_user: UserModel, - ) -> None: - """Test_does_not_log_set_data_when_calling_engine_steps_on_waiting_call_activity.""" - process_model = load_test_spec( - process_model_id="test_group/call-activity-to-human-task", - process_model_source_directory="call-activity-to-human-task", - ) - process_instance = self.create_process_instance_from_process_model( - process_model=process_model, user=with_super_admin_user - ) - processor = ProcessInstanceProcessor(process_instance) - processor.do_engine_steps(save=True) - - process_instance_logs = SpiffLoggingModel.query.filter_by(process_instance_id=process_instance.id).all() - initial_length = len(process_instance_logs) - - # ensure we have something in the logs - assert initial_length > 0 - - # logs should NOT increase after running this a second time since it's just waiting on a human task - processor.do_engine_steps(save=True) - process_instance_logs = SpiffLoggingModel.query.filter_by(process_instance_id=process_instance.id).all() - assert len(process_instance_logs) == initial_length diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_spiff_logging.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_spiff_logging.py deleted file mode 100644 index 9c8b4841..00000000 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_spiff_logging.py +++ /dev/null @@ -1,42 +0,0 @@ -"""Process Model.""" -from decimal import Decimal - -from flask.app import Flask -from tests.spiffworkflow_backend.helpers.base_test import BaseTest -from tests.spiffworkflow_backend.helpers.test_data import load_test_spec - -from spiffworkflow_backend.models.db import db -from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel - - -class TestSpiffLogging(BaseTest): - """TestSpiffLogging.""" - - def test_timestamps_are_stored_correctly(self, app: Flask, with_db_and_bpmn_file_cleanup: None) -> None: - """Test_timestamps_are_stored_correctly.""" - process_model = load_test_spec( - "call_activity_test", - process_model_source_directory="call_activity_same_directory", - ) - - process_instance = self.create_process_instance_from_process_model(process_model) - 
bpmn_process_identifier = "test_process_identifier" - spiff_task_guid = "test_spiff_task_guid" - bpmn_task_identifier = "test_bpmn_task_identifier" - timestamp = 1663250624.664887 # actual timestamp from spiff logs - message = "test_message" - spiff_log = SpiffLoggingModel( - process_instance_id=process_instance.id, - bpmn_process_identifier=bpmn_process_identifier, - spiff_task_guid=spiff_task_guid, - bpmn_task_identifier=bpmn_task_identifier, - message=message, - timestamp=timestamp, - spiff_step=1, - ) - assert spiff_log.timestamp == timestamp - - db.session.add(spiff_log) - db.session.commit() - - assert spiff_log.timestamp == Decimal(str(timestamp)) From 72b5b4caf22477bc7978e356b2770c69d7fb1a9d Mon Sep 17 00:00:00 2001 From: jasquat Date: Fri, 17 Mar 2023 15:50:05 -0400 Subject: [PATCH 054/162] recreated migrations to remove spiff logging w/ burnettk --- spiffworkflow-backend/migrations/env.py | 2 ++ .../{05153ab6a6b8_.py => 2596a98f760b_.py} | 22 +++---------------- 2 files changed, 5 insertions(+), 19 deletions(-) rename spiffworkflow-backend/migrations/versions/{05153ab6a6b8_.py => 2596a98f760b_.py} (96%) diff --git a/spiffworkflow-backend/migrations/env.py b/spiffworkflow-backend/migrations/env.py index 630e381a..68feded2 100644 --- a/spiffworkflow-backend/migrations/env.py +++ b/spiffworkflow-backend/migrations/env.py @@ -1,3 +1,5 @@ +from __future__ import with_statement + import logging from logging.config import fileConfig diff --git a/spiffworkflow-backend/migrations/versions/05153ab6a6b8_.py b/spiffworkflow-backend/migrations/versions/2596a98f760b_.py similarity index 96% rename from spiffworkflow-backend/migrations/versions/05153ab6a6b8_.py rename to spiffworkflow-backend/migrations/versions/2596a98f760b_.py index 5ee8fda7..75fa9d21 100644 --- a/spiffworkflow-backend/migrations/versions/05153ab6a6b8_.py +++ b/spiffworkflow-backend/migrations/versions/2596a98f760b_.py @@ -1,8 +1,8 @@ """empty message -Revision ID: 05153ab6a6b8 +Revision ID: 2596a98f760b Revises: -Create Date: 2023-03-17 12:22:43.449203 +Create Date: 2023-03-17 15:49:31.968141 """ from alembic import op @@ -10,7 +10,7 @@ import sqlalchemy as sa from sqlalchemy.dialects import mysql # revision identifiers, used by Alembic. 
-revision = '05153ab6a6b8' +revision = '2596a98f760b' down_revision = None branch_labels = None depends_on = None @@ -87,21 +87,6 @@ def upgrade(): op.create_index(op.f('ix_spec_reference_cache_display_name'), 'spec_reference_cache', ['display_name'], unique=False) op.create_index(op.f('ix_spec_reference_cache_identifier'), 'spec_reference_cache', ['identifier'], unique=False) op.create_index(op.f('ix_spec_reference_cache_type'), 'spec_reference_cache', ['type'], unique=False) - op.create_table('spiff_logging', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('process_instance_id', sa.Integer(), nullable=False), - sa.Column('bpmn_process_identifier', sa.String(length=255), nullable=False), - sa.Column('bpmn_process_name', sa.String(length=255), nullable=True), - sa.Column('bpmn_task_identifier', sa.String(length=255), nullable=False), - sa.Column('bpmn_task_name', sa.String(length=255), nullable=True), - sa.Column('bpmn_task_type', sa.String(length=255), nullable=True), - sa.Column('spiff_task_guid', sa.String(length=50), nullable=False), - sa.Column('timestamp', sa.DECIMAL(precision=17, scale=6), nullable=False), - sa.Column('message', sa.String(length=255), nullable=True), - sa.Column('current_user_id', sa.Integer(), nullable=True), - sa.Column('spiff_step', sa.Integer(), nullable=False), - sa.PrimaryKeyConstraint('id') - ) op.create_table('user', sa.Column('id', sa.Integer(), nullable=False), sa.Column('username', sa.String(length=255), nullable=False), @@ -465,7 +450,6 @@ def downgrade(): op.drop_index(op.f('ix_bpmn_process_guid'), table_name='bpmn_process') op.drop_table('bpmn_process') op.drop_table('user') - op.drop_table('spiff_logging') op.drop_index(op.f('ix_spec_reference_cache_type'), table_name='spec_reference_cache') op.drop_index(op.f('ix_spec_reference_cache_identifier'), table_name='spec_reference_cache') op.drop_index(op.f('ix_spec_reference_cache_display_name'), table_name='spec_reference_cache') From bd5e8d1c68fa39aed0c8aaf3083f6f95d3b68bd7 Mon Sep 17 00:00:00 2001 From: jasquat Date: Fri, 17 Mar 2023 15:57:00 -0400 Subject: [PATCH 055/162] removed some notes w/ burnettk --- .../src/routes/ProcessInstanceLogList.tsx | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx index 365ab7f1..d4c01dc2 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx @@ -191,16 +191,6 @@ export default function ProcessInstanceLogList({ variant }: OwnProps) { > Events - {/* - Suspend - Resumed - Terminated - - Skipped? - Rewind? - Execute? - Edit? - */}
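Patches 053 and 054 above drop spiff_logging entirely; per-instance history now lives in the ProcessInstanceEventModel rows that add_event_to_process_instance writes. A minimal sketch of reading that data back, assuming only the event model fields visible in these diffs (process_instance_id, event_type, timestamp) and the standard Flask-SQLAlchemy session; the helper name is illustrative and not part of any patch:

    from spiffworkflow_backend.models.db import db
    from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventModel

    def recent_events_for_instance(process_instance_id: int, limit: int = 100) -> list:
        # newest events first; callers can filter further on event_type as needed
        return (
            db.session.query(ProcessInstanceEventModel)
            .filter_by(process_instance_id=process_instance_id)
            .order_by(ProcessInstanceEventModel.timestamp.desc())
            .limit(limit)
            .all()
        )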
From 310c9535cfe9a3242095985c7b69600c67f619f7 Mon Sep 17 00:00:00 2001 From: jasquat Date: Fri, 17 Mar 2023 16:15:09 -0400 Subject: [PATCH 056/162] some updates to db indexes w/ burnettk --- .../models/message_instance_correlation.py | 2 +- .../src/spiffworkflow_backend/models/process_instance.py | 4 ++-- .../src/spiffworkflow_backend/models/task_definition.py | 2 +- .../src/spiffworkflow_backend/models/user.py | 7 ++++--- 4 files changed, 8 insertions(+), 7 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/message_instance_correlation.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/message_instance_correlation.py index 92ab8f14..755e6563 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/message_instance_correlation.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/message_instance_correlation.py @@ -30,7 +30,7 @@ class MessageInstanceCorrelationRuleModel(SpiffworkflowBaseDBModel): id = db.Column(db.Integer, primary_key=True) message_instance_id = db.Column(ForeignKey(MessageInstanceModel.id), nullable=False, index=True) # type: ignore - name: str = db.Column(db.String(50), nullable=False) + name: str = db.Column(db.String(50), nullable=False, index=True) retrieval_expression: str = db.Column(db.String(255)) updated_at_in_seconds: int = db.Column(db.Integer) created_at_in_seconds: int = db.Column(db.Integer) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py index 20492ce2..138aac26 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py @@ -61,7 +61,7 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel): process_initiator = relationship("UserModel") bpmn_process_definition_id: int | None = db.Column( - ForeignKey(BpmnProcessDefinitionModel.id), nullable=True # type: ignore + ForeignKey(BpmnProcessDefinitionModel.id), nullable=True, index=True # type: ignore ) bpmn_process_definition = relationship(BpmnProcessDefinitionModel) bpmn_process_id: int | None = db.Column(ForeignKey(BpmnProcessModel.id), nullable=True) # type: ignore @@ -90,7 +90,7 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel): ) # type: ignore bpmn_json: str | None = deferred(db.Column(db.JSON)) # type: ignore - start_in_seconds: int | None = db.Column(db.Integer) + start_in_seconds: int | None = db.Column(db.Integer, index=True) end_in_seconds: int | None = db.Column(db.Integer) updated_at_in_seconds: int = db.Column(db.Integer) created_at_in_seconds: int = db.Column(db.Integer) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/task_definition.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/task_definition.py index 6868f2cf..893331a8 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/task_definition.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/task_definition.py @@ -29,9 +29,9 @@ class TaskDefinitionModel(SpiffworkflowBaseDBModel): bpmn_identifier: str = db.Column(db.String(255), nullable=False, index=True) bpmn_name: str = db.Column(db.String(255), nullable=True, index=True) + typename: str = db.Column(db.String(255), nullable=False, index=True) properties_json: dict = db.Column(db.JSON, nullable=False) - typename: str = db.Column(db.String(255), nullable=False) updated_at_in_seconds: int = db.Column(db.Integer) created_at_in_seconds: int = 
db.Column(db.Integer) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py index 4b55e8b6..1f667e0a 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py @@ -28,11 +28,12 @@ class UserModel(SpiffworkflowBaseDBModel): id: int = db.Column(db.Integer, primary_key=True) username: str = db.Column(db.String(255), nullable=False, unique=True) + email = db.Column(db.String(255), index=True) + + service = db.Column(db.String(255), nullable=False, unique=False, index=True) # not 'openid' -- google, aws + service_id = db.Column(db.String(255), nullable=False, unique=False, index=True) - service = db.Column(db.String(255), nullable=False, unique=False) # not 'openid' -- google, aws - service_id = db.Column(db.String(255), nullable=False, unique=False) display_name = db.Column(db.String(255)) - email = db.Column(db.String(255)) tenant_specific_field_1: str | None = db.Column(db.String(255)) tenant_specific_field_2: str | None = db.Column(db.String(255)) tenant_specific_field_3: str | None = db.Column(db.String(255)) From 8b31f7379773f0a005681f9202139b07c7180a1b Mon Sep 17 00:00:00 2001 From: jasquat Date: Fri, 17 Mar 2023 16:26:35 -0400 Subject: [PATCH 057/162] added indexes to foreign key fields w/ burnettk --- .../src/spiffworkflow_backend/models/bpmn_process.py | 4 ++-- .../models/bpmn_process_definition_relationship.py | 4 ++-- .../src/spiffworkflow_backend/models/human_task.py | 10 +++++----- .../spiffworkflow_backend/models/message_instance.py | 4 ++-- .../models/permission_assignment.py | 4 ++-- .../spiffworkflow_backend/models/process_instance.py | 4 ++-- .../models/process_instance_event.py | 2 +- .../models/process_instance_file_data.py | 2 +- .../models/process_instance_metadata.py | 2 +- .../models/process_instance_queue.py | 2 +- .../src/spiffworkflow_backend/models/secret_model.py | 2 +- .../spiffworkflow_backend/models/spiff_step_details.py | 2 +- .../src/spiffworkflow_backend/models/task.py | 6 +++--- .../spiffworkflow_backend/models/task_definition.py | 2 +- .../models/user_group_assignment.py | 4 ++-- .../models/user_group_assignment_waiting.py | 2 +- 16 files changed, 28 insertions(+), 28 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py index 24ccbe28..3d6a74da 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py @@ -20,11 +20,11 @@ class BpmnProcessModel(SpiffworkflowBaseDBModel): guid: str | None = db.Column(db.String(36), nullable=True, unique=True, index=True) bpmn_process_definition_id: int = db.Column( - ForeignKey(BpmnProcessDefinitionModel.id), nullable=False # type: ignore + ForeignKey(BpmnProcessDefinitionModel.id), nullable=False, index=True # type: ignore ) bpmn_process_definition = relationship(BpmnProcessDefinitionModel) - parent_process_id: int | None = db.Column(ForeignKey("bpmn_process.id"), nullable=True) + parent_process_id: int | None = db.Column(ForeignKey("bpmn_process.id"), nullable=True, index=True) properties_json: dict = db.Column(db.JSON, nullable=False) json_data_hash: str = db.Column(db.String(255), nullable=False, index=True) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process_definition_relationship.py 
b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process_definition_relationship.py index 5ab4a7a2..096570d8 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process_definition_relationship.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process_definition_relationship.py @@ -22,8 +22,8 @@ class BpmnProcessDefinitionRelationshipModel(SpiffworkflowBaseDBModel): id: int = db.Column(db.Integer, primary_key=True) bpmn_process_definition_parent_id: int = db.Column( - ForeignKey(BpmnProcessDefinitionModel.id), nullable=False # type: ignore + ForeignKey(BpmnProcessDefinitionModel.id), nullable=False, index=True # type: ignore ) bpmn_process_definition_child_id: int = db.Column( - ForeignKey(BpmnProcessDefinitionModel.id), nullable=False # type: ignore + ForeignKey(BpmnProcessDefinitionModel.id), nullable=False, index=True # type: ignore ) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task.py index c747930d..4e0d2ffc 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task.py @@ -29,13 +29,13 @@ class HumanTaskModel(SpiffworkflowBaseDBModel): __tablename__ = "human_task" id: int = db.Column(db.Integer, primary_key=True) - process_instance_id: int = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=False) # type: ignore - lane_assignment_id: int | None = db.Column(ForeignKey(GroupModel.id)) - completed_by_user_id: int = db.Column(ForeignKey(UserModel.id), nullable=True) # type: ignore + process_instance_id: int = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=False, index=True) # type: ignore + lane_assignment_id: int | None = db.Column(ForeignKey(GroupModel.id), index=True) + completed_by_user_id: int = db.Column(ForeignKey(UserModel.id), nullable=True, index=True) # type: ignore completed_by_user = relationship("UserModel", foreign_keys=[completed_by_user_id], viewonly=True) - actual_owner_id: int = db.Column(ForeignKey(UserModel.id)) # type: ignore + actual_owner_id: int = db.Column(ForeignKey(UserModel.id), index=True) # type: ignore # actual_owner: RelationshipProperty[UserModel] = relationship(UserModel) form_file_name: str | None = db.Column(db.String(50)) @@ -45,7 +45,7 @@ class HumanTaskModel(SpiffworkflowBaseDBModel): created_at_in_seconds: int = db.Column(db.Integer) # task_id came first which is why it's a string and task_model_id is the int and foreignkey - task_model_id: int = db.Column(ForeignKey(TaskModel.id), nullable=True) # type: ignore + task_model_id: int = db.Column(ForeignKey(TaskModel.id), nullable=True, index=True) # type: ignore task_id: str = db.Column(db.String(50)) task_name: str = db.Column(db.String(255)) task_title: str = db.Column(db.String(50)) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/message_instance.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/message_instance.py index 31de7cd4..3a4735ac 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/message_instance.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/message_instance.py @@ -47,7 +47,7 @@ class MessageInstanceModel(SpiffworkflowBaseDBModel): __tablename__ = "message_instance" id: int = db.Column(db.Integer, primary_key=True) - process_instance_id: int = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=True) # type: ignore + process_instance_id: int = 
db.Column(ForeignKey(ProcessInstanceModel.id), nullable=True, index=True) # type: ignore name: str = db.Column(db.String(255)) message_type: str = db.Column(db.String(20), nullable=False) # Only Send Messages have a payload @@ -55,7 +55,7 @@ class MessageInstanceModel(SpiffworkflowBaseDBModel): # The correlation keys of the process at the time the message was created. correlation_keys: dict = db.Column(db.JSON) status: str = db.Column(db.String(20), nullable=False, default="ready") - user_id: int = db.Column(ForeignKey(UserModel.id), nullable=True) # type: ignore + user_id: int = db.Column(ForeignKey(UserModel.id), nullable=True, index=True) # type: ignore user = relationship("UserModel") counterpart_id: int = db.Column( db.Integer diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/permission_assignment.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/permission_assignment.py index 01d4b935..f77e0c0e 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/permission_assignment.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/permission_assignment.py @@ -46,8 +46,8 @@ class PermissionAssignmentModel(SpiffworkflowBaseDBModel): ), ) id = db.Column(db.Integer, primary_key=True) - principal_id = db.Column(ForeignKey(PrincipalModel.id), nullable=False) - permission_target_id = db.Column(ForeignKey(PermissionTargetModel.id), nullable=False) # type: ignore + principal_id = db.Column(ForeignKey(PrincipalModel.id), nullable=False, index=True) + permission_target_id = db.Column(ForeignKey(PermissionTargetModel.id), nullable=False, index=True) # type: ignore grant_type = db.Column(db.String(50), nullable=False) permission = db.Column(db.String(50), nullable=False) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py index 138aac26..e312d2cd 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py @@ -57,14 +57,14 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel): id: int = db.Column(db.Integer, primary_key=True) process_model_identifier: str = db.Column(db.String(255), nullable=False, index=True) process_model_display_name: str = db.Column(db.String(255), nullable=False, index=True) - process_initiator_id: int = db.Column(ForeignKey(UserModel.id), nullable=False) # type: ignore + process_initiator_id: int = db.Column(ForeignKey(UserModel.id), nullable=False, index=True) # type: ignore process_initiator = relationship("UserModel") bpmn_process_definition_id: int | None = db.Column( ForeignKey(BpmnProcessDefinitionModel.id), nullable=True, index=True # type: ignore ) bpmn_process_definition = relationship(BpmnProcessDefinitionModel) - bpmn_process_id: int | None = db.Column(ForeignKey(BpmnProcessModel.id), nullable=True) # type: ignore + bpmn_process_id: int | None = db.Column(ForeignKey(BpmnProcessModel.id), nullable=True, index=True) # type: ignore bpmn_process = relationship(BpmnProcessModel, cascade="delete") tasks = relationship("TaskModel", cascade="delete") # type: ignore process_instance_events = relationship("ProcessInstanceEventModel", cascade="delete") # type: ignore diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_event.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_event.py index 560abffd..fe920b57 100644 --- 
a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_event.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_event.py @@ -30,7 +30,7 @@ class ProcessInstanceEventModel(SpiffworkflowBaseDBModel): # use task guid so we can bulk insert without worrying about whether or not the task has an id yet task_guid: str | None = db.Column(db.String(36), nullable=True, index=True) - process_instance_id: int = db.Column(ForeignKey("process_instance.id"), nullable=False) + process_instance_id: int = db.Column(ForeignKey("process_instance.id"), nullable=False, index=True) event_type: str = db.Column(db.String(50), nullable=False, index=True) timestamp: float = db.Column(db.DECIMAL(17, 6), nullable=False, index=True) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_file_data.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_file_data.py index 5d3567ad..08cc4048 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_file_data.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_file_data.py @@ -17,7 +17,7 @@ class ProcessInstanceFileDataModel(SpiffworkflowBaseDBModel): __tablename__ = "process_instance_file_data" id: int = db.Column(db.Integer, primary_key=True) - process_instance_id: int = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=False) # type: ignore + process_instance_id: int = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=False, index=True) # type: ignore identifier: str = db.Column(db.String(255), nullable=False) list_index: Optional[int] = db.Column(db.Integer, nullable=True) mimetype: str = db.Column(db.String(255), nullable=False) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_metadata.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_metadata.py index b5e88ff8..1bd3c436 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_metadata.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_metadata.py @@ -16,7 +16,7 @@ class ProcessInstanceMetadataModel(SpiffworkflowBaseDBModel): __table_args__ = (db.UniqueConstraint("process_instance_id", "key", name="process_instance_metadata_unique"),) id: int = db.Column(db.Integer, primary_key=True) - process_instance_id: int = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=False) # type: ignore + process_instance_id: int = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=False, index=True) # type: ignore key: str = db.Column(db.String(255), nullable=False, index=True) value: str = db.Column(db.String(255), nullable=False) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_queue.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_queue.py index c0cb9f27..9bf63c00 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_queue.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_queue.py @@ -17,7 +17,7 @@ class ProcessInstanceQueueModel(SpiffworkflowBaseDBModel): id: int = db.Column(db.Integer, primary_key=True) process_instance_id: int = db.Column( - ForeignKey(ProcessInstanceModel.id), index=True, unique=True, nullable=False # type: ignore + ForeignKey(ProcessInstanceModel.id), unique=True, nullable=False, index=True # type: ignore ) run_at_in_seconds: int = db.Column(db.Integer) priority: int =
db.Column(db.Integer) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/secret_model.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/secret_model.py index 026831ed..41f72e1b 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/secret_model.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/secret_model.py @@ -17,7 +17,7 @@ class SecretModel(SpiffworkflowBaseDBModel): id: int = db.Column(db.Integer, primary_key=True) key: str = db.Column(db.String(50), unique=True, nullable=False) value: str = db.Column(db.Text(), nullable=False) - user_id: int = db.Column(ForeignKey(UserModel.id), nullable=False) # type: ignore + user_id: int = db.Column(ForeignKey(UserModel.id), nullable=False, index=True) # type: ignore updated_at_in_seconds: int = db.Column(db.Integer) created_at_in_seconds: int = db.Column(db.Integer) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py index beed8da7..2568d742 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py @@ -19,7 +19,7 @@ class SpiffStepDetailsModel(SpiffworkflowBaseDBModel): __table_args__ = (UniqueConstraint("process_instance_id", "spiff_step", name="process_instance_id_spiff_step"),) id: int = db.Column(db.Integer, primary_key=True) - process_instance_id: int = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=False) # type: ignore + process_instance_id: int = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=False, index=True) # type: ignore spiff_step: int = db.Column(db.Integer, nullable=False) task_json: dict = deferred(db.Column(db.JSON, nullable=False)) # type: ignore task_id: str = db.Column(db.String(50), nullable=False) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py index 98058071..cbd1ec65 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py @@ -49,12 +49,12 @@ class TaskModel(SpiffworkflowBaseDBModel): __tablename__ = "task" id: int = db.Column(db.Integer, primary_key=True) guid: str = db.Column(db.String(36), nullable=False, unique=True, index=True) - bpmn_process_id: int = db.Column(ForeignKey(BpmnProcessModel.id), nullable=False) # type: ignore + bpmn_process_id: int = db.Column(ForeignKey(BpmnProcessModel.id), nullable=False, index=True) # type: ignore bpmn_process = relationship(BpmnProcessModel, back_populates="tasks") - process_instance_id: int = db.Column(ForeignKey("process_instance.id"), nullable=False) + process_instance_id: int = db.Column(ForeignKey("process_instance.id"), nullable=False, index=True) # find this by looking up the "workflow_name" and "task_spec" from the properties_json - task_definition_id: int = db.Column(ForeignKey(TaskDefinitionModel.id), nullable=False) # type: ignore + task_definition_id: int = db.Column(ForeignKey(TaskDefinitionModel.id), nullable=False, index=True) # type: ignore task_definition = relationship("TaskDefinitionModel") state: str = db.Column(db.String(10), nullable=False) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/task_definition.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/task_definition.py index 893331a8..791e1dea 100644 --- 
a/spiffworkflow-backend/src/spiffworkflow_backend/models/task_definition.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/task_definition.py @@ -23,7 +23,7 @@ class TaskDefinitionModel(SpiffworkflowBaseDBModel): id: int = db.Column(db.Integer, primary_key=True) bpmn_process_definition_id: int = db.Column( - ForeignKey(BpmnProcessDefinitionModel.id), nullable=False # type: ignore + ForeignKey(BpmnProcessDefinitionModel.id), nullable=False, index=True # type: ignore ) bpmn_process_definition = relationship(BpmnProcessDefinitionModel) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/user_group_assignment.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/user_group_assignment.py index 45467a81..83ceb067 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/user_group_assignment.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/user_group_assignment.py @@ -15,8 +15,8 @@ class UserGroupAssignmentModel(SpiffworkflowBaseDBModel): __table_args__ = (db.UniqueConstraint("user_id", "group_id", name="user_group_assignment_unique"),) id = db.Column(db.Integer, primary_key=True) - user_id = db.Column(ForeignKey(UserModel.id), nullable=False) # type: ignore - group_id = db.Column(ForeignKey(GroupModel.id), nullable=False) + user_id = db.Column(ForeignKey(UserModel.id), nullable=False, index=True) # type: ignore + group_id = db.Column(ForeignKey(GroupModel.id), nullable=False, index=True) group = relationship("GroupModel", overlaps="groups,user_group_assignments,users") # type: ignore user = relationship("UserModel", overlaps="groups,user_group_assignments,users") # type: ignore diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/user_group_assignment_waiting.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/user_group_assignment_waiting.py index 5616728b..9b019391 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/user_group_assignment_waiting.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/user_group_assignment_waiting.py @@ -19,7 +19,7 @@ class UserGroupAssignmentWaitingModel(SpiffworkflowBaseDBModel): id = db.Column(db.Integer, primary_key=True) username = db.Column(db.String(255), nullable=False) - group_id = db.Column(ForeignKey(GroupModel.id), nullable=False) + group_id = db.Column(ForeignKey(GroupModel.id), nullable=False, index=True) group = relationship("GroupModel", overlaps="groups,user_group_assignments_waiting,users") # type: ignore From c08d7703041aefb3c090f28210dfb6897ae16eba Mon Sep 17 00:00:00 2001 From: Dan Date: Fri, 17 Mar 2023 16:36:31 -0400 Subject: [PATCH 058/162] If filters are modified, enable the filter button, display a message asking users to press the filter button. Don't render the table. Sigh. 
--- .../components/ProcessInstanceListTable.tsx | 84 ++++++++++++++----- 1 file changed, 65 insertions(+), 19 deletions(-) diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx index 04710605..c4ccc313 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -139,6 +139,7 @@ export default function ProcessInstanceListTable({ const [startToTimeInvalid, setStartToTimeInvalid] = useState(false); const [endFromTimeInvalid, setEndFromTimeInvalid] = useState(false); const [endToTimeInvalid, setEndToTimeInvalid] = useState(false); + const [requiresRefilter, setRequiresRefilter] = useState(false); const processInstanceListPathPrefix = variant === 'all' @@ -243,6 +244,7 @@ export default function ProcessInstanceListTable({ // eslint-disable-next-line sonarjs/cognitive-complexity useEffect(() => { function setProcessInstancesFromResult(result: any) { + setRequiresRefilter(false); const processInstancesFromApi = result.results; setProcessInstances(processInstancesFromApi); setPagination(result.pagination); @@ -793,6 +795,7 @@ export default function ProcessInstanceListTable({ ); Object.assign(reportMetadataCopy, { columns: newColumns }); setReportMetadata(reportMetadataCopy); + setRequiresRefilter(true); } }; @@ -864,6 +867,7 @@ export default function ProcessInstanceListTable({ setReportMetadata(reportMetadataCopy); setReportColumnToOperateOn(null); setShowReportColumnForm(false); + setRequiresRefilter(true); } }; @@ -891,6 +895,7 @@ export default function ProcessInstanceListTable({ ); } setReportColumnToOperateOn(reportColumnForEditing); + setRequiresRefilter(true); }; // options includes item and inputValue @@ -912,6 +917,7 @@ export default function ProcessInstanceListTable({ }; reportColumnToOperateOnCopy.filter_field_value = event.target.value; setReportColumnToOperateOn(reportColumnToOperateOnCopy); + setRequiresRefilter(true); } }; @@ -950,6 +956,7 @@ export default function ProcessInstanceListTable({ value={reportColumnToOperateOn ? 
reportColumnToOperateOn.Header : ''} onChange={(event: any) => { if (reportColumnToOperateOn) { + setRequiresRefilter(true); const reportColumnToOperateOnCopy = { ...reportColumnToOperateOn, }; @@ -1091,9 +1098,10 @@ export default function ProcessInstanceListTable({ - setProcessModelSelection(selection.selectedItem) - } + onChange={(selection: any) => { + setProcessModelSelection(selection.selectedItem); + setRequiresRefilter(true); + }} processModels={processModelAvailableItems} selectedItem={processModelSelection} /> @@ -1112,6 +1120,7 @@ export default function ProcessInstanceListTable({ onInputChange={searchForProcessInitiator} onChange={(event: any) => { setProcessInitiatorSelection(event.selectedItem); + setRequiresRefilter(true); }} id="process-instance-initiator-search" data-qa="process-instance-initiator-search" @@ -1133,9 +1142,10 @@ export default function ProcessInstanceListTable({ id="process-instance-initiator-search" placeholder="Enter username" labelText="Process Initiator" - onChange={(event: any) => - setProcessInitiatorText(event.target.value) - } + onChange={(event: any) => { + setProcessInitiatorText(event.target.value); + setRequiresRefilter(true); + }} /> ); }} @@ -1150,8 +1160,14 @@ export default function ProcessInstanceListTable({ 'start-from', startFromDate, startFromTime, - setStartFromDate, - setStartFromTime, + (val: string) => { + setStartFromDate(val); + setRequiresRefilter(true); + }, + (val: string) => { + setStartFromTime(val); + setRequiresRefilter(true); + }, startFromTimeInvalid, setStartFromTimeInvalid )} @@ -1162,8 +1178,14 @@ export default function ProcessInstanceListTable({ 'start-to', startToDate, startToTime, - setStartToDate, - setStartToTime, + (val: string) => { + setStartToDate(val); + setRequiresRefilter(true); + }, + (val: string) => { + setStartToTime(val); + setRequiresRefilter(true); + }, startToTimeInvalid, setStartToTimeInvalid )} @@ -1174,8 +1196,14 @@ export default function ProcessInstanceListTable({ 'end-from', endFromDate, endFromTime, - setEndFromDate, - setEndFromTime, + (val: string) => { + setEndFromDate(val); + setRequiresRefilter(true); + }, + (val: string) => { + setEndFromTime(val); + setRequiresRefilter(true); + }, endFromTimeInvalid, setEndFromTimeInvalid )} @@ -1186,8 +1214,14 @@ export default function ProcessInstanceListTable({ 'end-to', endToDate, endToTime, - setEndToDate, - setEndToTime, + (val: string) => { + setEndToDate(val); + setRequiresRefilter(true); + }, + (val: string) => { + setEndToTime(val); + setRequiresRefilter(true); + }, endToTimeInvalid, setEndToTimeInvalid )} @@ -1205,6 +1239,7 @@ export default function ProcessInstanceListTable({ @@ -1483,7 +1483,7 @@ export default function ProcessInstanceListTable({ if (requiresRefilter) { refilterTextComponent = (

- * Please press the filter button when you have completed updating the + * Please press the Apply button when you have completed updating the filters.

); From d6684124fdf5b4e514ceb92d6c9a758f05a38135 Mon Sep 17 00:00:00 2001 From: jasquat Date: Mon, 20 Mar 2023 16:51:29 -0400 Subject: [PATCH 069/162] use task table for process instance show page. spiff steps are not working yet and neither is data w/ burnettk --- spiffworkflow-backend/conftest.py | 3 +- spiffworkflow-backend/migrations/env.py | 2 + .../{b652c232839f_.py => 4255f548bfb4_.py} | 18 +++-- .../src/spiffworkflow_backend/api.yml | 12 +++ .../models/bpmn_process.py | 7 +- .../routes/process_instances_controller.py | 42 ++++++++-- .../services/process_instance_processor.py | 2 +- .../services/task_service.py | 51 +++++++++--- .../manual_task_with_subprocesses.bpmn | 2 +- .../test_process_to_call.bpmn | 79 +++++++++++++------ .../unit/test_process_instance_processor.py | 21 ++++- .../src/components/ReactDiagramEditor.tsx | 14 ++-- spiffworkflow-frontend/src/interfaces.ts | 13 ++- .../src/routes/ProcessInstanceShow.tsx | 77 +++++++++--------- 14 files changed, 240 insertions(+), 103 deletions(-) rename spiffworkflow-backend/migrations/versions/{b652c232839f_.py => 4255f548bfb4_.py} (97%) diff --git a/spiffworkflow-backend/conftest.py b/spiffworkflow-backend/conftest.py index 304008d0..9d05dfe5 100644 --- a/spiffworkflow-backend/conftest.py +++ b/spiffworkflow-backend/conftest.py @@ -47,7 +47,8 @@ def app() -> Flask: def with_db_and_bpmn_file_cleanup() -> None: """Do it cleanly!""" meta = db.metadata - db.session.execute(db.update(BpmnProcessModel, values={"parent_process_id": None})) + db.session.execute(db.update(BpmnProcessModel, values={"top_level_process_id": None})) + db.session.execute(db.update(BpmnProcessModel, values={"direct_parent_process_id": None})) for table in reversed(meta.sorted_tables): db.session.execute(table.delete()) diff --git a/spiffworkflow-backend/migrations/env.py b/spiffworkflow-backend/migrations/env.py index 630e381a..68feded2 100644 --- a/spiffworkflow-backend/migrations/env.py +++ b/spiffworkflow-backend/migrations/env.py @@ -1,3 +1,5 @@ +from __future__ import with_statement + import logging from logging.config import fileConfig diff --git a/spiffworkflow-backend/migrations/versions/b652c232839f_.py b/spiffworkflow-backend/migrations/versions/4255f548bfb4_.py similarity index 97% rename from spiffworkflow-backend/migrations/versions/b652c232839f_.py rename to spiffworkflow-backend/migrations/versions/4255f548bfb4_.py index dbf5b276..a66c074b 100644 --- a/spiffworkflow-backend/migrations/versions/b652c232839f_.py +++ b/spiffworkflow-backend/migrations/versions/4255f548bfb4_.py @@ -1,8 +1,8 @@ """empty message -Revision ID: b652c232839f +Revision ID: 4255f548bfb4 Revises: -Create Date: 2023-03-17 16:50:32.774216 +Create Date: 2023-03-20 13:00:28.655387 """ from alembic import op @@ -10,7 +10,7 @@ import sqlalchemy as sa from sqlalchemy.dialects import mysql # revision identifiers, used by Alembic. 
-revision = 'b652c232839f' +revision = '4255f548bfb4' down_revision = None branch_labels = None depends_on = None @@ -115,19 +115,22 @@ def upgrade(): sa.Column('id', sa.Integer(), nullable=False), sa.Column('guid', sa.String(length=36), nullable=True), sa.Column('bpmn_process_definition_id', sa.Integer(), nullable=False), - sa.Column('parent_process_id', sa.Integer(), nullable=True), + sa.Column('top_level_process_id', sa.Integer(), nullable=True), + sa.Column('direct_parent_process_id', sa.Integer(), nullable=True), sa.Column('properties_json', sa.JSON(), nullable=False), sa.Column('json_data_hash', sa.String(length=255), nullable=False), sa.Column('start_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True), sa.Column('end_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True), sa.ForeignKeyConstraint(['bpmn_process_definition_id'], ['bpmn_process_definition.id'], ), - sa.ForeignKeyConstraint(['parent_process_id'], ['bpmn_process.id'], ), + sa.ForeignKeyConstraint(['direct_parent_process_id'], ['bpmn_process.id'], ), + sa.ForeignKeyConstraint(['top_level_process_id'], ['bpmn_process.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('guid') ) op.create_index(op.f('ix_bpmn_process_bpmn_process_definition_id'), 'bpmn_process', ['bpmn_process_definition_id'], unique=False) + op.create_index(op.f('ix_bpmn_process_direct_parent_process_id'), 'bpmn_process', ['direct_parent_process_id'], unique=False) op.create_index(op.f('ix_bpmn_process_json_data_hash'), 'bpmn_process', ['json_data_hash'], unique=False) - op.create_index(op.f('ix_bpmn_process_parent_process_id'), 'bpmn_process', ['parent_process_id'], unique=False) + op.create_index(op.f('ix_bpmn_process_top_level_process_id'), 'bpmn_process', ['top_level_process_id'], unique=False) op.create_table('bpmn_process_definition_relationship', sa.Column('id', sa.Integer(), nullable=False), sa.Column('bpmn_process_definition_parent_id', sa.Integer(), nullable=False), @@ -519,8 +522,9 @@ def downgrade(): op.drop_index(op.f('ix_bpmn_process_definition_relationship_bpmn_process_definition_child_id'), table_name='bpmn_process_definition_relationship') op.drop_index(op.f('ix_bpmn_process_definition_relationship_bpmn_process_definition_parent_id'), table_name='bpmn_process_definition_relationship') op.drop_table('bpmn_process_definition_relationship') - op.drop_index(op.f('ix_bpmn_process_top_level_process_id'), table_name='bpmn_process') op.drop_index(op.f('ix_bpmn_process_json_data_hash'), table_name='bpmn_process') - op.drop_index(op.f('ix_bpmn_process_direct_parent_process_id'), table_name='bpmn_process') op.drop_index(op.f('ix_bpmn_process_bpmn_process_definition_id'), table_name='bpmn_process') op.drop_table('bpmn_process') op.drop_index(op.f('ix_user_service_id'), table_name='user') diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml index b71bed93..7cffde1c 100755 --- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml @@ -919,6 +919,12 @@ paths: description: If true, this will return only the most recent tasks. schema: type: boolean + - name: bpmn_process_guid + in: query + required: false + description: The guid of the bpmn process to get the tasks for.
+ schema:
+ type: string
 get:
 tags:
 - Process Instances
@@ -972,6 +978,12 @@ paths:
 description: If true, this will return only the most recent tasks.
 schema:
 type: boolean
+ - name: bpmn_process_guid
+ in: query
+ required: false
+ description: The guid of the bpmn process to get the tasks for.
+ schema:
+ type: string
 get:
 tags:
 - Process Instances
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py
index 22bdfa70..c38fed7b 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py
@@ -8,6 +8,10 @@ from spiffworkflow_backend.models.db import db
 from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
+class BpmnProcessNotFoundError(Exception):
+ pass
+
+
 # properties_json attributes:
 # "last_task", # guid generated by spiff
 # "root", # guid generated by spiff
@@ -24,7 +28,8 @@ class BpmnProcessModel(SpiffworkflowBaseDBModel):
 )
 bpmn_process_definition = relationship(BpmnProcessDefinitionModel)
- parent_process_id: int | None = db.Column(ForeignKey("bpmn_process.id"), nullable=True, index=True)
+ top_level_process_id: int | None = db.Column(ForeignKey("bpmn_process.id"), nullable=True, index=True)
+ direct_parent_process_id: int | None = db.Column(ForeignKey("bpmn_process.id"), nullable=True, index=True)
 properties_json: dict = db.Column(db.JSON, nullable=False)
 json_data_hash: str = db.Column(db.String(255), nullable=False, index=True)
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py
index 59399f2f..f75df6c1 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py
@@ -1,5 +1,7 @@
 """APIs for dealing with process groups, process models, and process instances."""
 import base64
+from spiffworkflow_backend.services.task_service import TaskService
+from sqlalchemy.orm import aliased
 from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel
 import json
 from typing import Any
@@ -560,6 +562,7 @@ def process_instance_task_list_without_task_data_for_me(
 all_tasks: bool = False,
 spiff_step: int = 0,
 most_recent_tasks_only: bool = False,
+ bpmn_process_guid: Optional[str] = None,
 ) -> flask.wrappers.Response:
 """Process_instance_task_list_without_task_data_for_me."""
 process_instance = _find_process_instance_for_me_or_raise(process_instance_id)
@@ -569,6 +572,7 @@
 all_tasks=all_tasks,
 spiff_step=spiff_step,
 most_recent_tasks_only=most_recent_tasks_only,
+ bpmn_process_guid=bpmn_process_guid
 )


@@ -578,6 +582,7 @@ def process_instance_task_list_without_task_data(
 all_tasks: bool = False,
 spiff_step: int = 0,
 most_recent_tasks_only: bool = False,
+ bpmn_process_guid: Optional[str] = None,
 ) -> flask.wrappers.Response:
 """Process_instance_task_list_without_task_data."""
 process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
@@ -587,12 +592,14 @@
 all_tasks=all_tasks,
 spiff_step=spiff_step,
 most_recent_tasks_only=most_recent_tasks_only,
+ bpmn_process_guid=bpmn_process_guid
 )


 def process_instance_task_list(
 _modified_process_model_identifier: str,
 process_instance:
ProcessInstanceModel, + bpmn_process_guid: Optional[str] = None, all_tasks: bool = False, spiff_step: int = 0, to_task_guid: Optional[str] = None, @@ -644,9 +651,14 @@ def process_instance_task_list( # state: string; # typename: string; - # calling_subprocess_task_guid: string; - # call_activity_process_bpmn_identifier?: string; + # calling_subprocess_task_guid: string; -> bpmn_process_direct_parent_guid + # call_activity_process_bpmn_identifier?: string; -> bpmn_process_direct_parent_bpmn_identifier + bpmn_process_ids = [] + if bpmn_process_guid: + bpmn_process = BpmnProcessModel.query.filter_by(guid=bpmn_process_guid).first() + bpmn_processes = TaskService.bpmn_process_and_descendants([bpmn_process]) + bpmn_process_ids = [p.id for p in bpmn_processes] task_model_query = db.session.query(TaskModel).filter( TaskModel.process_instance_id == process_instance.id, @@ -664,23 +676,39 @@ def process_instance_task_list( ) task_model_query = task_model_query.filter(TaskModel.end_in_seconds <= to_task_model.end_in_seconds) + bpmn_process_alias = aliased(BpmnProcessModel) + direct_parent_bpmn_process_alias = aliased(BpmnProcessModel) + direct_parent_bpmn_process_definition_alias = aliased(BpmnProcessDefinitionModel) + task_model_query = ( task_model_query.order_by( - ProcessInstanceEventModel.timestamp.desc(), ProcessInstanceEventModel.id.desc() # type: ignore + TaskModel.id.desc() # type: ignore ) .join(TaskDefinitionModel, TaskDefinitionModel.id == TaskModel.task_definition_id) - .join(BpmnProcessModel, BpmnProcessModel.id == TaskModel.bpmn_process_id) + .join(bpmn_process_alias, bpmn_process_alias.id == TaskModel.bpmn_process_id) + .outerjoin(direct_parent_bpmn_process_alias, direct_parent_bpmn_process_alias.id == bpmn_process_alias.direct_parent_process_id) + .outerjoin(direct_parent_bpmn_process_definition_alias, direct_parent_bpmn_process_definition_alias.id == direct_parent_bpmn_process_alias.bpmn_process_definition_id) .join( BpmnProcessDefinitionModel, BpmnProcessDefinitionModel.id == TaskDefinitionModel.bpmn_process_definition_id ) .add_columns( BpmnProcessDefinitionModel.bpmn_identifier.label("bpmn_process_definition_identifier"), # type: ignore BpmnProcessDefinitionModel.bpmn_name.label("bpmn_process_definition_name"), # type: ignore - TaskDefinitionModel.bpmn_identifier.label("task_definition_identifier"), # type: ignore - TaskDefinitionModel.bpmn_name.label("task_definition_name"), # type: ignore - TaskDefinitionModel.typename.label("bpmn_task_type"), # type: ignore + direct_parent_bpmn_process_alias.guid.label("bpmn_process_direct_parent_guid"), + direct_parent_bpmn_process_definition_alias.bpmn_identifier.label("bpmn_process_direct_parent_bpmn_identifier"), + TaskDefinitionModel.bpmn_identifier, + TaskDefinitionModel.bpmn_name, + TaskDefinitionModel.typename, + TaskDefinitionModel.properties_json.label('task_definition_properties_json'), # type: ignore ) ) + + if len(bpmn_process_ids) > 0: + print(f"bpmn_process_ids: {bpmn_process_ids}") + task_model_query = ( + task_model_query.filter(bpmn_process_alias.id.in_(bpmn_process_ids)) + ) + task_models = task_model_query.all() # processor = ProcessInstanceProcessor(process_instance) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index ea59c414..fdd42cb9 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ 
b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -687,7 +687,7 @@ class ProcessInstanceProcessor: single_bpmn_process_dict = cls._get_bpmn_process_dict(bpmn_process, get_tasks=True) spiff_bpmn_process_dict.update(single_bpmn_process_dict) - bpmn_subprocesses = BpmnProcessModel.query.filter_by(parent_process_id=bpmn_process.id).all() + bpmn_subprocesses = BpmnProcessModel.query.filter_by(top_level_process_id=bpmn_process.id).all() bpmn_subprocess_id_to_guid_mappings = {} for bpmn_subprocess in bpmn_subprocesses: bpmn_subprocess_id_to_guid_mappings[bpmn_subprocess.id] = bpmn_subprocess.guid diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index 5a03f387..fa902406 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -13,7 +13,7 @@ from SpiffWorkflow.task import TaskStateNames from sqlalchemy.dialects.mysql import insert as mysql_insert from sqlalchemy.dialects.postgresql import insert as postgres_insert -from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel +from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel, BpmnProcessNotFoundError from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.json_data import JsonDataModel # noqa: F401 from spiffworkflow_backend.models.process_instance import ProcessInstanceModel @@ -144,7 +144,7 @@ class TaskService: bpmn_process, new_task_models, new_json_data_dicts = cls.add_bpmn_process( bpmn_process_dict=serializer.workflow_to_dict(subprocess), process_instance=process_instance, - bpmn_process_parent=process_instance.bpmn_process, + top_level_process=process_instance.bpmn_process, bpmn_process_guid=subprocess_guid, bpmn_definition_to_task_definitions_mappings=bpmn_definition_to_task_definitions_mappings, spiff_workflow=spiff_workflow, @@ -160,7 +160,7 @@ class TaskService: bpmn_definition_to_task_definitions_mappings: dict, spiff_workflow: BpmnWorkflow, serializer: BpmnWorkflowSerializer, - bpmn_process_parent: Optional[BpmnProcessModel] = None, + top_level_process: Optional[BpmnProcessModel] = None, bpmn_process_guid: Optional[str] = None, ) -> Tuple[BpmnProcessModel, dict[str, TaskModel], dict[str, JsonDataDict]]: """This creates and adds a bpmn_process to the Db session. 
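With parent_process_id split into top_level_process_id and direct_parent_process_id, a bpmn_process row now records both the process instance's root process and its immediate container. A rough sketch of how the two columns are meant to be read back, assuming the models in this patch (the call_path helper is illustrative only and not part of the change):

    # One indexed query fetches every bpmn_process belonging to an instance,
    # thanks to the denormalized top-level pointer.
    subprocesses = BpmnProcessModel.query.filter_by(
        top_level_process_id=process_instance.bpmn_process_id
    ).all()

    # Walking direct_parent_process_id recovers the exact call hierarchy
    # from the top-level process down to the given bpmn_process.
    def call_path(bpmn_process: BpmnProcessModel) -> list[BpmnProcessModel]:
        path = [bpmn_process]
        while path[-1].direct_parent_process_id is not None:
            path.append(
                BpmnProcessModel.query.filter_by(
                    id=path[-1].direct_parent_process_id
                ).first()
            )
        return list(reversed(path))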
@@ -182,9 +182,9 @@ class TaskService: new_json_data_dicts: dict[str, JsonDataDict] = {} bpmn_process = None - if bpmn_process_parent is not None: + if top_level_process is not None: bpmn_process = BpmnProcessModel.query.filter_by( - parent_process_id=bpmn_process_parent.id, guid=bpmn_process_guid + top_level_process_id=top_level_process.id, guid=bpmn_process_guid ).first() elif process_instance.bpmn_process_id is not None: bpmn_process = process_instance.bpmn_process @@ -194,6 +194,28 @@ class TaskService: bpmn_process_is_new = True bpmn_process = BpmnProcessModel(guid=bpmn_process_guid) + bpmn_process_definition = bpmn_definition_to_task_definitions_mappings[spiff_workflow.spec.name][ + "bpmn_process_definition" + ] + bpmn_process.bpmn_process_definition = bpmn_process_definition + + if top_level_process is not None: + subprocesses = spiff_workflow._get_outermost_workflow().subprocesses + direct_bpmn_process_parent = top_level_process + for subprocess_guid, subprocess in subprocesses.items(): + if subprocess == spiff_workflow.outer_workflow: + direct_bpmn_process_parent = BpmnProcessModel.query.filter_by(guid=str(subprocess_guid)).first() + if direct_bpmn_process_parent is None: + raise BpmnProcessNotFoundError( + f"Could not find bpmn process with guid: {str(subprocess_guid)} " + f"while searching for direct parent process of {bpmn_process_guid}." + ) + + if direct_bpmn_process_parent is None: + raise BpmnProcessNotFoundError(f"Could not find a direct bpmn process parent for guid: {bpmn_process_guid}") + + bpmn_process.direct_parent_process_id = direct_bpmn_process_parent.id + # Point the root id to the Start task instead of the Root task # since we are ignoring the Root task. for task_id, task_properties in tasks.items(): @@ -206,15 +228,10 @@ class TaskService: if bpmn_process_json_data is not None: new_json_data_dicts[bpmn_process_json_data["hash"]] = bpmn_process_json_data - if bpmn_process_parent is None: + if top_level_process is None: process_instance.bpmn_process = bpmn_process - elif bpmn_process.parent_process_id is None: - bpmn_process.parent_process_id = bpmn_process_parent.id - - bpmn_process_definition = bpmn_definition_to_task_definitions_mappings[spiff_workflow.spec.name][ - "bpmn_process_definition" - ] - bpmn_process.bpmn_process_definition = bpmn_process_definition + elif bpmn_process.top_level_process_id is None: + bpmn_process.top_level_process_id = top_level_process.id # Since we bulk insert tasks later we need to add the bpmn_process to the session # to ensure we have an id. 
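The direct-parent search above leans on SpiffWorkflow internals: the outermost workflow keeps all subprocess workflows keyed by guid, and the entry whose workflow object matches our outer_workflow is the immediate container. Condensed into a standalone sketch for readability (illustrative only; it uses the same SpiffWorkflow calls as the patch):

    from typing import Optional

    def find_direct_parent_guid(spiff_workflow) -> Optional[str]:
        # Every subprocess of the outermost workflow, keyed by its guid.
        subprocesses = spiff_workflow._get_outermost_workflow().subprocesses
        for subprocess_guid, subprocess in subprocesses.items():
            if subprocess == spiff_workflow.outer_workflow:
                return str(subprocess_guid)
        # No match means the immediate container is the top-level workflow itself.
        return None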
@@ -285,6 +302,14 @@ class TaskService: setattr(task_model, task_model_data_column, task_data_hash) return json_data_dict + @classmethod + def bpmn_process_and_descendants(cls, bpmn_processes: list[BpmnProcessModel]) -> list[BpmnProcessModel]: + bpmn_process_ids = [p.id for p in bpmn_processes] + direct_children = BpmnProcessModel.query.filter(BpmnProcessModel.direct_parent_process_id.in_(bpmn_process_ids)).all() # type: ignore + if len(direct_children) > 0: + return bpmn_processes + cls.bpmn_process_and_descendants(direct_children) + return bpmn_processes + @classmethod def _create_task( cls, diff --git a/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn b/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn index 939c8c0b..680903f5 100644 --- a/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn +++ b/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn @@ -151,4 +151,4 @@ except: -
+ \ No newline at end of file diff --git a/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/test_process_to_call.bpmn b/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/test_process_to_call.bpmn index 299f078e..afda130a 100644 --- a/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/test_process_to_call.bpmn +++ b/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/test_process_to_call.bpmn @@ -1,38 +1,71 @@ - - Flow_06g687y - - - - Flow_01e21r0 + + Flow_095sred - - - Flow_06g687y - Flow_01e21r0 - set_in_test_process_to_call_script = 1 - + + Flow_1qsx5et + + + Flow_1qsx5et + Flow_095sred + + Flow_12zb3j0 + + + Flow_12zb3j0 + Flow_0iu4d71 + set_in_test_process_to_call_script = 1 + + + Flow_0iu4d71 + + + + + + - - + + + + + + + + + + + + + + + + + + + + + + + - + - - + + - - - + + + - - - + + + diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index 70f97328..0b80a46c 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -326,11 +326,11 @@ class TestProcessInstanceProcessor(BaseTest): "manual_task": first_data_set, "top_level_subprocess_script": second_data_set, "top_level_subprocess": second_data_set, - "test_process_to_call_script": third_data_set, + "test_process_to_call_subprocess_script": third_data_set, "top_level_call_activity": third_data_set, "end_event_of_manual_task_model": third_data_set, "top_level_subprocess_script_second": fourth_data_set, - "test_process_to_call_script_second": fourth_data_set, + "test_process_to_call_subprocess_script_second": fourth_data_set, } spiff_tasks_checked_once: list = [] @@ -365,7 +365,7 @@ class TestProcessInstanceProcessor(BaseTest): assert len(all_spiff_tasks) > 1 for spiff_task in all_spiff_tasks: assert spiff_task.state == TaskState.COMPLETED - assert_spiff_task_is_in_process("test_process_to_call_script", "test_process_to_call") + assert_spiff_task_is_in_process("test_process_to_call_subprocess_script", "test_process_to_call_subprocess") assert_spiff_task_is_in_process("top_level_subprocess_script", "top_level_subprocess") assert_spiff_task_is_in_process("top_level_script", "top_level_process") @@ -378,6 +378,21 @@ class TestProcessInstanceProcessor(BaseTest): assert bpmn_process_definition.bpmn_identifier == "test_process_to_call" assert bpmn_process_definition.bpmn_name == "Test Process To Call" + # Check that the direct parent of the called activity subprocess task is the + # name of the process that was called from the activity. 
+ if spiff_task.task_spec.name == "test_process_to_call_subprocess_script": + task_model = TaskModel.query.filter_by(guid=str(spiff_task.id)).first() + assert task_model is not None + bpmn_process = task_model.bpmn_process + assert bpmn_process is not None + bpmn_process_definition = bpmn_process.bpmn_process_definition + assert bpmn_process_definition is not None + assert bpmn_process_definition.bpmn_identifier == "test_process_to_call_subprocess" + assert bpmn_process.direct_parent_process_id is not None + direct_parent_process = BpmnProcessModel.query.filter_by(id=bpmn_process.direct_parent_process_id).first() + assert direct_parent_process is not None + assert direct_parent_process.bpmn_process_definition.bpmn_identifier == "test_process_to_call" + assert processor.get_data() == fifth_data_set def test_does_not_recreate_human_tasks_on_multiple_saves( diff --git a/spiffworkflow-frontend/src/components/ReactDiagramEditor.tsx b/spiffworkflow-frontend/src/components/ReactDiagramEditor.tsx index 126f0c4f..e3989c63 100644 --- a/spiffworkflow-frontend/src/components/ReactDiagramEditor.tsx +++ b/spiffworkflow-frontend/src/components/ReactDiagramEditor.tsx @@ -60,14 +60,14 @@ import HttpService from '../services/HttpService'; import ButtonWithConfirmation from './ButtonWithConfirmation'; import { getBpmnProcessIdentifiers, makeid } from '../helpers'; import { useUriListForPermissions } from '../hooks/UriListForPermissions'; -import { PermissionsToCheck, ProcessInstanceTask } from '../interfaces'; +import { PermissionsToCheck, Task } from '../interfaces'; import { usePermissionFetcher } from '../hooks/PermissionService'; type OwnProps = { processModelId: string; diagramType: string; - readyOrWaitingProcessInstanceTasks?: ProcessInstanceTask[] | null; - completedProcessInstanceTasks?: ProcessInstanceTask[] | null; + readyOrWaitingProcessInstanceTasks?: Task[] | null; + completedProcessInstanceTasks?: Task[] | null; saveDiagram?: (..._args: any[]) => any; onDeleteFile?: (..._args: any[]) => any; isPrimaryFile?: boolean; @@ -364,18 +364,18 @@ export default function ReactDiagramEditor({ function highlightBpmnIoElement( canvas: any, - processInstanceTask: ProcessInstanceTask, + task: Task, bpmnIoClassName: string, bpmnProcessIdentifiers: string[] ) { - if (checkTaskCanBeHighlighted(processInstanceTask.name)) { + if (checkTaskCanBeHighlighted(task.bpmn_identifier)) { try { if ( bpmnProcessIdentifiers.includes( - processInstanceTask.process_identifier + task.bpmn_process_definition_identifier ) ) { - canvas.addMarker(processInstanceTask.name, bpmnIoClassName); + canvas.addMarker(task.bpmn_identifier, bpmnIoClassName); } } catch (bpmnIoError: any) { // the task list also contains task for processes called from call activities which will diff --git a/spiffworkflow-frontend/src/interfaces.ts b/spiffworkflow-frontend/src/interfaces.ts index aaf11ade..8b61f474 100644 --- a/spiffworkflow-frontend/src/interfaces.ts +++ b/spiffworkflow-frontend/src/interfaces.ts @@ -21,17 +21,26 @@ export interface RecentProcessModel { processModelDisplayName: string; } +export interface TaskDefinitionPropertiesJson { + spec: string; +} + export interface Task { + id: number; guid: string; bpmn_identifier: string; bpmn_name?: string; - calling_subprocess_task_guid: string; + bpmn_process_direct_parent_guid: string; + bpmn_process_definition_identifier: string; data: any; state: string; typename: string; - call_activity_process_bpmn_identifier?: string; + task_definition_properties_json: TaskDefinitionPropertiesJson; + + // 
TODO: DELETE THIS!
+ task_spiff_step?: number;
 }

 export interface ProcessInstanceTask {
diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx
index 36c06d23..1b555a04 100644
--- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx
+++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx
@@ -46,7 +46,8 @@ import {
 ProcessData,
 ProcessInstance,
 ProcessInstanceMetadata,
- ProcessInstanceTask,
+ Task,
+ TaskDefinitionPropertiesJson,
 } from '../interfaces';
 import { usePermissionFetcher } from '../hooks/PermissionService';
 import ProcessInstanceClass from '../classes/ProcessInstanceClass';
@@ -64,10 +65,9 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
 const [processInstance, setProcessInstance] = useState<ProcessInstance | null>(null);
- const [tasks, setTasks] = useState<ProcessInstanceTask[] | null>(null);
+ const [tasks, setTasks] = useState<Task[] | null>(null);
 const [tasksCallHadError, setTasksCallHadError] = useState(false);
- const [taskToDisplay, setTaskToDisplay] =
- useState<ProcessInstanceTask | null>(null);
+ const [taskToDisplay, setTaskToDisplay] = useState<Task | null>(null);
 const [taskDataToDisplay, setTaskDataToDisplay] = useState('');
 const [showTaskDataLoading, setShowTaskDataLoading] = useState(false);
@@ -148,6 +148,10 @@
 if (typeof params.spiff_step !== 'undefined') {
 taskParams = `${taskParams}&spiff_step=${params.spiff_step}`;
 }
+ const bpmnProcessGuid = searchParams.get('bpmn_process_guid');
+ if (bpmnProcessGuid) {
+ taskParams = `${taskParams}&bpmn_process_guid=${bpmnProcessGuid}`;
+ }
 let taskPath = '';
 if (ability.can('GET', taskListPath)) {
 taskPath = `${taskListPath}${taskParams}`;
 }
@@ -213,14 +217,7 @@
 const getTaskIds = () => {
 const taskIds = { completed: [], readyOrWaiting: [] };
 if (tasks) {
- const callingSubprocessId = searchParams.get('call_activity_task_id');
- tasks.forEach(function getUserTasksElement(task: ProcessInstanceTask) {
- if (
- callingSubprocessId &&
- callingSubprocessId !== task.calling_subprocess_task_id
- ) {
- return null;
- }
+ tasks.forEach(function getUserTasksElement(task: Task) {
 if (task.state === 'COMPLETED') {
 (taskIds.completed as any).push(task);
 }
@@ -251,13 +248,13 @@
 const spiffStepLink = (label: any, spiffStep: number) => {
 const processIdentifier = searchParams.get('process_identifier');
- const callActivityTaskId = searchParams.get('call_activity_task_id');
+ const callActivityTaskId = searchParams.get('bpmn_process_guid');
 const queryParamArray = [];
 if (processIdentifier) {
 queryParamArray.push(`process_identifier=${processIdentifier}`);
 }
 if (callActivityTaskId) {
- queryParamArray.push(`call_activity_task_id=${callActivityTaskId}`);
+ queryParamArray.push(`bpmn_process_guid=${callActivityTaskId}`);
 }
 let queryParams = '';
 if (queryParamArray.length > 0) {
 queryParams = `?${queryParamArray.join('&')}`;
 }
 return
; }; - const processTaskResult = (result: ProcessInstanceTask) => { + const processTaskResult = (result: Task) => { if (result == null) { setTaskDataToDisplay(''); } else { @@ -518,7 +515,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { setShowTaskDataLoading(false); }; - const initializeTaskDataToDisplay = (task: ProcessInstanceTask | null) => { + const initializeTaskDataToDisplay = (task: Task | null) => { if ( task && task.state === 'COMPLETED' && @@ -526,7 +523,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { ) { setShowTaskDataLoading(true); HttpService.makeCallToBackend({ - path: `${targetUris.processInstanceTaskDataPath}/${task.task_spiff_step}`, + path: `${targetUris.processInstanceTaskDataPath}/${task.id}`, httpMethod: 'GET', successCallback: processTaskResult, failureCallback: (error: any) => { @@ -577,13 +574,12 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { successCallback: handleProcessDataShowResponse, }); } else if (tasks) { - const matchingTask: any = tasks.find((task: any) => { - const callingSubprocessId = searchParams.get('call_activity_task_id'); + const matchingTask: Task | undefined = tasks.find((task: Task) => { return ( - (!callingSubprocessId || - callingSubprocessId === task.calling_subprocess_task_id) && - task.name === shapeElement.id && - bpmnProcessIdentifiers.includes(task.process_identifier) + task.bpmn_identifier === shapeElement.id && + bpmnProcessIdentifiers.includes( + task.bpmn_process_definition_identifier + ) ); }); if (matchingTask) { @@ -618,7 +614,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { httpMethod: 'POST', successCallback: processScriptUnitTestCreateResult, postBody: { - bpmn_task_identifier: taskToUse.name, + bpmn_task_identifier: taskToUse.bpmn_identifier, input_json: previousTask.data, expected_output_json: taskToUse.data, }, @@ -634,7 +630,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { ]; return ( (task.state === 'WAITING' && - subprocessTypes.filter((t) => t === task.type).length > 0) || + subprocessTypes.filter((t) => t === task.typename).length > 0) || task.state === 'READY' ); }; @@ -656,7 +652,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { processInstance && processInstance.status === 'waiting' && ability.can('POST', targetUris.processInstanceSendEventPath) && - taskTypes.filter((t) => t === task.type).length > 0 && + taskTypes.filter((t) => t === task.typename).length > 0 && task.state === 'WAITING' && showingLastSpiffStep() ); @@ -717,7 +713,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { setEditingTaskData(false); const dataObject = taskDataStringToObject(taskDataToDisplay); if (taskToDisplay) { - const taskToDisplayCopy: ProcessInstanceTask = { + const taskToDisplayCopy: Task = { ...taskToDisplay, data: dataObject, }; // spread operator @@ -768,11 +764,11 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { }); }; - const taskDisplayButtons = (task: any) => { + const taskDisplayButtons = (task: Task) => { const buttons = []; if ( - task.type === 'Script Task' && + task.typename === 'Script Task' && ability.can('PUT', targetUris.processModelShowPath) ) { buttons.push( @@ -785,11 +781,15 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { ); } - if (task.type === 'Call Activity') { + if (task.typename === 'CallActivity') { + console.log('task', task) + const taskDefinitionPropertiesJson: 
TaskDefinitionPropertiesJson = + task.task_definition_properties_json; + console.log('taskDefinitionPropertiesJson', taskDefinitionPropertiesJson) buttons.push( View Call Activity Diagram @@ -971,12 +971,15 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { }; const taskUpdateDisplayArea = () => { - const taskToUse: any = { ...taskToDisplay, data: taskDataToDisplay }; + if (!taskToDisplay) { + return null; + } + const taskToUse: Task = { ...taskToDisplay, data: taskDataToDisplay }; const candidateEvents: any = getEvents(taskToUse); if (taskToDisplay) { - let taskTitleText = taskToUse.id; - if (taskToUse.title) { - taskTitleText += ` (${taskToUse.title})`; + let taskTitleText = taskToUse.guid; + if (taskToUse.bpmn_name) { + taskTitleText += ` (${taskToUse.bpmn_name})`; } return ( - {taskToUse.name} ( - {taskToUse.type} + {taskToUse.bpmn_identifier} ( + {taskToUse.typename} ): {taskToUse.state} {taskDisplayButtons(taskToUse)} From 28bda31802ee76721c321b1742cae6f4bd6ea386 Mon Sep 17 00:00:00 2001 From: jasquat Date: Mon, 20 Mar 2023 17:05:15 -0400 Subject: [PATCH 070/162] get task data from json data table now w/ burnettk --- .../src/spiffworkflow_backend/api.yml | 18 ++++----- .../src/spiffworkflow_backend/models/task.py | 2 + .../routes/tasks_controller.py | 38 +++++-------------- .../src/routes/ProcessInstanceShow.tsx | 2 +- 4 files changed, 22 insertions(+), 38 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml index 7cffde1c..43d32c5e 100755 --- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml @@ -1585,7 +1585,7 @@ paths: items: $ref: "#/components/schemas/Task" - /task-data/{modified_process_model_identifier}/{process_instance_id}/{spiff_step}: + /task-data/{modified_process_model_identifier}/{process_instance_id}/{task_guid}: parameters: - name: modified_process_model_identifier in: path @@ -1599,12 +1599,12 @@ paths: description: The unique id of an existing process instance. schema: type: integer - - name: spiff_step + - name: task_guid in: path required: true - description: If set will return the tasks as they were during a specific step of execution. + description: The guid of the task to show. schema: - type: integer + type: string get: operationId: spiffworkflow_backend.routes.tasks_controller.task_data_show summary: Get task data for a single task in a spiff step. @@ -1638,12 +1638,12 @@ paths: description: The unique id of the task. schema: type: string - - name: spiff_step - in: query - required: false - description: If set will return the tasks as they were during a specific step of execution. + - name: task_guid + in: path + required: true + description: The guid of the task to show. 
schema:
- type: integer
+ type: string
 put:
 operationId: spiffworkflow_backend.routes.process_api_blueprint.task_data_update
 summary: Update the task data for requested instance and task
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py
index 70a60ae1..a3e182c0 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py
@@ -66,6 +66,8 @@ class TaskModel(SpiffworkflowBaseDBModel):
 start_in_seconds: float = db.Column(db.DECIMAL(17, 6))
 end_in_seconds: Union[float, None] = db.Column(db.DECIMAL(17, 6))
+ data: Optional[dict] = None
+
 def python_env_data(self) -> dict:
 return JsonDataModel.find_data_dict_by_hash(self.python_env_data_hash)
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py
index ad9868e6..37c29575 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py
@@ -1,5 +1,6 @@
 """APIs for dealing with process groups, process models, and process instances."""
 import json
+from spiffworkflow_backend.models.task import TaskModel # noqa: F401
 import os
 import uuid
 from sys import exc_info
@@ -169,38 +170,19 @@ def task_list_for_my_groups(
 def task_data_show(
 modified_process_model_identifier: str,
 process_instance_id: int,
- spiff_step: int = 0,
+ task_guid: str = "",
 ) -> flask.wrappers.Response:
- process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
- step_detail = (
- db.session.query(SpiffStepDetailsModel)
- .filter(
- SpiffStepDetailsModel.process_instance_id == process_instance.id,
- SpiffStepDetailsModel.spiff_step == spiff_step,
- )
- .first()
- )
-
- if step_detail is None:
+ task_model = TaskModel.query.filter_by(guid=task_guid, process_instance_id=process_instance_id).first()
+ if task_model is None:
 raise ApiError(
- error_code="spiff_step_for_proces_instance_not_found",
- message="The given spiff step for the given process instance could not be found.",
+ error_code="task_not_found",
+ message=(
+ f"Cannot find a task with guid '{task_guid}' for process instance '{process_instance_id}'"
+ ),
 status_code=400,
 )
-
- processor = ProcessInstanceProcessor(process_instance)
- spiff_task = processor.__class__.get_task_by_bpmn_identifier(
- step_detail.bpmn_task_identifier, processor.bpmn_process_instance
- )
- task_data = step_detail.task_json["task_data"] | step_detail.task_json["python_env"]
- task = ProcessInstanceService.spiff_task_to_api_task(
- processor,
- spiff_task,
- task_spiff_step=spiff_step,
- )
- task.data = task_data
-
- return make_response(jsonify(task), 200)
+ task_model.data = task_model.json_data()
+ return make_response(jsonify(task_model), 200)


 def _munge_form_ui_schema_based_on_hidden_fields_in_task_data(task: Task) -> None:
diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx
index 1b555a04..33b29ae3 100644
--- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx
+++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx
@@ -523,7 +523,7 @@
 ) {
 setShowTaskDataLoading(true);
 HttpService.makeCallToBackend({
- path: `${targetUris.processInstanceTaskDataPath}/${task.id}`,
+ path:
`${targetUris.processInstanceTaskDataPath}/${task.guid}`,
 httpMethod: 'GET',
 successCallback: processTaskResult,
 failureCallback: (error: any) => {

From 41a3bbc475026308869c9dd25b5db7c2f01dd3e7 Mon Sep 17 00:00:00 2001
From: jasquat
Date: Mon, 20 Mar 2023 17:29:53 -0400
Subject: [PATCH 071/162] send to_task_guid to signify how far to go with the tasks w/ burnettk

---
 .../src/spiffworkflow_backend/api.yml | 24 +++----
 .../routes/process_instances_controller.py | 8 ++-
 .../src/routes/AdminRoutes.tsx | 4 +-
 .../src/routes/ProcessInstanceShow.tsx | 71 +++++--------------
 4 files changed, 37 insertions(+), 70 deletions(-)

diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml
index 43d32c5e..1a21e643 100755
--- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml
@@ -907,12 +907,6 @@ paths:
 description: If true, this will return all tasks associated with the process instance and not just user tasks.
 schema:
 type: boolean
- - name: spiff_step
- in: query
- required: false
- description: If set will return the tasks as they were during a specific step of execution.
- schema:
- type: integer
 - name: most_recent_tasks_only
 in: query
 required: false
@@ -925,6 +919,12 @@ paths:
 description: The guid of the bpmn process to get the tasks for.
 schema:
 type: string
+ - name: to_task_guid
+ in: query
+ required: false
+ description: Get the tasks only up to the given guid.
+ schema:
+ type: string
 get:
 tags:
 - Process Instances
@@ -966,12 +966,6 @@ paths:
 description: If true, this will return all tasks associated with the process instance and not just user tasks.
 schema:
 type: boolean
- - name: spiff_step
- in: query
- required: false
- description: If set will return the tasks as they were during a specific step of execution.
- schema:
- type: integer
 - name: most_recent_tasks_only
 in: query
 required: false
@@ -984,6 +978,12 @@ paths:
 description: The guid of the bpmn process to get the tasks for.
 schema:
 type: string
+ - name: to_task_guid
+ in: query
+ required: false
+ description: Get the tasks only up to the given guid.
+ schema: + type: string get: tags: - Process Instances diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index f75df6c1..ccc46358 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -563,6 +563,7 @@ def process_instance_task_list_without_task_data_for_me( spiff_step: int = 0, most_recent_tasks_only: bool = False, bpmn_process_guid: Optional[str] = None, + to_task_guid: Optional[str] = None, ) -> flask.wrappers.Response: """Process_instance_task_list_without_task_data_for_me.""" process_instance = _find_process_instance_for_me_or_raise(process_instance_id) @@ -572,7 +573,8 @@ def process_instance_task_list_without_task_data_for_me( all_tasks=all_tasks, spiff_step=spiff_step, most_recent_tasks_only=most_recent_tasks_only, - bpmn_process_guid=bpmn_process_guid + bpmn_process_guid=bpmn_process_guid, + to_task_guid=to_task_guid, ) @@ -583,6 +585,7 @@ def process_instance_task_list_without_task_data( spiff_step: int = 0, most_recent_tasks_only: bool = False, bpmn_process_guid: Optional[str] = None, + to_task_guid: Optional[str] = None, ) -> flask.wrappers.Response: """Process_instance_task_list_without_task_data.""" process_instance = _find_process_instance_by_id_or_raise(process_instance_id) @@ -592,7 +595,8 @@ def process_instance_task_list_without_task_data( all_tasks=all_tasks, spiff_step=spiff_step, most_recent_tasks_only=most_recent_tasks_only, - bpmn_process_guid=bpmn_process_guid + bpmn_process_guid=bpmn_process_guid, + to_task_guid=to_task_guid, ) diff --git a/spiffworkflow-frontend/src/routes/AdminRoutes.tsx b/spiffworkflow-frontend/src/routes/AdminRoutes.tsx index d183dc01..d04d50b1 100644 --- a/spiffworkflow-frontend/src/routes/AdminRoutes.tsx +++ b/spiffworkflow-frontend/src/routes/AdminRoutes.tsx @@ -73,7 +73,7 @@ export default function AdminRoutes() { element={} /> } /> } /> } /> { - if (processInstance && typeof params.spiff_step === 'undefined') { + if (processInstance && typeof params.to_task_guid === 'undefined') { return processInstance.spiff_step || 0; } @@ -246,7 +246,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { return processInstance && currentSpiffStep() === processInstance.spiff_step; }; - const spiffStepLink = (label: any, spiffStep: number) => { + const completionViewLink = (label: any, taskGuid: string) => { const processIdentifier = searchParams.get('process_identifier'); const callActivityTaskId = searchParams.get('bpmn_process_guid'); const queryParamArray = []; @@ -265,29 +265,13 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { {label} ); }; - const previousStepLink = () => { - if (showingFirstSpiffStep()) { - return null; - } - - return spiffStepLink(, currentSpiffStep() - 1); - }; - - const nextStepLink = () => { - if (showingLastSpiffStep()) { - return null; - } - - return spiffStepLink(, currentSpiffStep() + 1); - }; - const returnToLastSpiffStep = () => { window.location.href = processInstanceShowPageBaseUrl; }; @@ -782,10 +766,10 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { } if (task.typename === 'CallActivity') { - console.log('task', task) + console.log('task', task); const taskDefinitionPropertiesJson: TaskDefinitionPropertiesJson = task.task_definition_properties_json; - 
console.log('taskDefinitionPropertiesJson', taskDefinitionPropertiesJson) + console.log('taskDefinitionPropertiesJson', taskDefinitionPropertiesJson); buttons.push( - {taskToUse.task_spiff_step ? ( -
- - Task completed at step:{' '} - {spiffStepLink( - `${taskToUse.task_spiff_step}`, - taskToUse.task_spiff_step - )} - -
-
-
- ) : null} +
+ + {completionViewLink( + 'View state at task completion', + taskToUse.guid + )} + +
+
+
{selectingEvent ? eventSelector(candidateEvents) : taskDataContainer()} @@ -1015,23 +996,6 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { return null; }; - const stepsElement = () => { - if (!processInstance) { - return null; - } - return ( - - - - {previousStepLink()} - Step {currentSpiffStep()} of {processInstance.spiff_step} - {nextStepLink()} - - - - ); - }; - const buttonIcons = () => { if (!processInstance) { return null; @@ -1119,7 +1083,6 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { {taskUpdateDisplayArea()} {processDataDisplayArea()} {processInstanceMetadataArea()} - {stepsElement()}
Date: Mon, 20 Mar 2023 17:47:52 -0400
Subject: [PATCH 072/162] checking for "falsy" lists in python will return false if the list is empty -- but we want to clear out the columns and filters if they are empty lists.

---
 .../services/process_instance_report_service.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py
index 3de0319e..0f62a738 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py
@@ -455,9 +455,9 @@ class ProcessInstanceReportService:
 instance_metadata_aliases = {}
 stock_columns = ProcessInstanceReportService.get_column_names_for_model(ProcessInstanceModel)
- if report_filter.report_column_list:
+ if isinstance(report_filter.report_column_list, list):
 process_instance_report.report_metadata["columns"] = report_filter.report_column_list
- if report_filter.report_filter_by_list:
+ if isinstance(report_filter.report_filter_by_list, list):
 process_instance_report.report_metadata["filter_by"] = report_filter.report_filter_by_list
 for column in process_instance_report.report_metadata["columns"]:

From e9339e4591f24a8a43f3852cc67dcdeece1e909d Mon Sep 17 00:00:00 2001
From: jbirddog <100367399+jbirddog@users.noreply.github.com>
Date: Tue, 21 Mar 2023 09:37:10 -0400
Subject: [PATCH 073/162] Optimistically skip locking/background processing (#190)

---
 .../src/spiffworkflow_backend/__init__.py | 5 +++-
 .../spiffworkflow_backend/config/default.py | 9 +++++++
 .../services/process_instance_service.py | 26 +++++++++++++++++--
 3 files changed, 37 insertions(+), 3 deletions(-)

diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py b/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py
index 3e2191c8..3d216dc6 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py
@@ -69,6 +69,9 @@ def start_scheduler(app: flask.app.Flask, scheduler_class: BaseScheduler = Backg
 # TODO: polling intervals for different jobs
 polling_interval_in_seconds = app.config["SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_POLLING_INTERVAL_IN_SECONDS"]
+ user_input_required_polling_interval_in_seconds = app.config[
+ "SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_USER_INPUT_REQUIRED_POLLING_INTERVAL_IN_SECONDS"
+ ]
 # TODO: add job to release locks to simplify other queries
 # TODO: add job to delete completed entries
 # TODO: add job to run old/low priority instances so they do not get drowned out
@@ -86,7 +89,7 @@
 scheduler.add_job(
 BackgroundProcessingService(app).process_user_input_required_process_instances,
 "interval",
- seconds=120,
+ seconds=user_input_required_polling_interval_in_seconds,
 )
 scheduler.start()
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py
index 2af3e7df..1805e8af 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py
@@ -18,12 +18,21 @@ SPIFFWORKFLOW_BACKEND_CORS_ALLOW_ORIGINS = re.split(
 SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER = (
environ.get("SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER", default="false") == "true" ) +SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_ALLOW_OPTIMISTIC_CHECKS = ( + environ.get("SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_ALLOW_OPTIMISTIC_CHECKS", default="true") == "true" +) SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_POLLING_INTERVAL_IN_SECONDS = int( environ.get( "SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_POLLING_INTERVAL_IN_SECONDS", default="10", ) ) +SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_USER_INPUT_REQUIRED_POLLING_INTERVAL_IN_SECONDS = int( + environ.get( + "SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_USER_INPUT_REQUIRED_POLLING_INTERVAL_IN_SECONDS", + default="120", + ) +) SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND = environ.get( "SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND", default="http://localhost:7001" ) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py index 45e83d7c..4daabd58 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py @@ -11,6 +11,7 @@ from urllib.parse import unquote import sentry_sdk from flask import current_app +from SpiffWorkflow.bpmn.specs.events.IntermediateEvent import _BoundaryEventParent # type: ignore from SpiffWorkflow.task import Task as SpiffTask # type: ignore from spiffworkflow_backend import db @@ -81,8 +82,25 @@ class ProcessInstanceService: process_model = ProcessModelService.get_process_model(process_model_identifier) return cls.create_process_instance(process_model, user) - @staticmethod - def do_waiting(status_value: str = ProcessInstanceStatus.waiting.value) -> None: + @classmethod + def ready_user_task_has_associated_timer(cls, processor: ProcessInstanceProcessor) -> bool: + for ready_user_task in processor.bpmn_process_instance.get_ready_user_tasks(): + if isinstance(ready_user_task.parent.task_spec, _BoundaryEventParent): + return True + return False + + @classmethod + def can_optimistically_skip(cls, processor: ProcessInstanceProcessor, status_value: str) -> bool: + if not current_app.config["SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_ALLOW_OPTIMISTIC_CHECKS"]: + return False + + if processor.process_instance_model.status != status_value: + return True + + return status_value == "user_input_required" and not cls.ready_user_task_has_associated_timer(processor) + + @classmethod + def do_waiting(cls, status_value: str = ProcessInstanceStatus.waiting.value) -> None: """Do_waiting.""" process_instance_ids_to_check = ProcessInstanceQueueService.peek_many(status_value) if len(process_instance_ids_to_check) == 0: @@ -100,6 +118,10 @@ class ProcessInstanceService: try: current_app.logger.info(f"Processing process_instance {process_instance.id}") processor = ProcessInstanceProcessor(process_instance) + if cls.can_optimistically_skip(processor, status_value): + current_app.logger.info(f"Optimistically skipped process_instance {process_instance.id}") + continue + processor.lock_process_instance(process_instance_lock_prefix) locked = True db.session.refresh(process_instance) From f8101379eee3029ad9286440087ae50c758458a9 Mon Sep 17 00:00:00 2001 From: jasquat Date: Tue, 21 Mar 2023 10:45:10 -0400 Subject: [PATCH 074/162] use consistent types for tasks in webui instance show page and mark the to task guid task as ready in backend --- .../routes/process_instances_controller.py | 132 
++---------------- spiffworkflow-frontend/src/interfaces.ts | 20 +++ .../src/routes/ProcessInstanceShow.tsx | 77 +++++----- 3 files changed, 68 insertions(+), 161 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index ccc46358..43ed6cef 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -610,54 +610,6 @@ def process_instance_task_list( most_recent_tasks_only: bool = False, ) -> flask.wrappers.Response: """Process_instance_task_list.""" - # step_detail_query = db.session.query(SpiffStepDetailsModel).filter( - # SpiffStepDetailsModel.process_instance_id == process_instance.id, - # ) - # - # if spiff_step > 0: - # step_detail_query = step_detail_query.filter(SpiffStepDetailsModel.spiff_step <= spiff_step) - # - # step_details = step_detail_query.all() - # - # processor = ProcessInstanceProcessor(process_instance) - # full_bpmn_process_dict = processor.full_bpmn_process_dict - # tasks = full_bpmn_process_dict["tasks"] - # subprocesses = full_bpmn_process_dict["subprocesses"] - # - # steps_by_id = {step_detail.task_id: step_detail for step_detail in step_details} - # - # def restore_task(spiff_task: dict[str, Any], step_ended: float) -> None: - # if spiff_task["last_state_change"] > step_ended: - # spiff_task["state"] = Task.task_state_name_to_int("FUTURE") - # spiff_task["data"] = {} - # - # if spiff_step > 0: - # last_change = step_details[-1].end_in_seconds or 0 - # for spiff_task in tasks.values(): - # restore_task(spiff_task, last_change) - # for subprocess in subprocesses.values(): - # for spiff_task in subprocess["tasks"].values(): - # restore_task(spiff_task, last_change) - # - # bpmn_process_instance = ProcessInstanceProcessor._serializer.workflow_from_dict(full_bpmn_process_dict) - # if spiff_step > 0: - # bpmn_process_instance.complete_task_from_id(UUID(step_details[-1].task_id)) - # for subprocess_id, subprocess in bpmn_process_instance.subprocesses.items(): - # if not subprocess.is_completed(): - # task = bpmn_process_instance.get_task(subprocess_id) - # task._set_state(TaskState.WAITING) - - # guid: string; - # bpmn_identifier: string; - # - # bpmn_name?: string; - # - # state: string; - # typename: string; - - # calling_subprocess_task_guid: string; -> bpmn_process_direct_parent_guid - # call_activity_process_bpmn_identifier?: string; -> bpmn_process_direct_parent_bpmn_identifier - bpmn_process_ids = [] if bpmn_process_guid: bpmn_process = BpmnProcessModel.query.filter_by(guid=bpmn_process_guid).first() @@ -704,90 +656,24 @@ def process_instance_task_list( TaskDefinitionModel.bpmn_name, TaskDefinitionModel.typename, TaskDefinitionModel.properties_json.label('task_definition_properties_json'), # type: ignore + TaskModel.guid, + TaskModel.state, ) ) if len(bpmn_process_ids) > 0: - print(f"bpmn_process_ids: {bpmn_process_ids}") task_model_query = ( task_model_query.filter(bpmn_process_alias.id.in_(bpmn_process_ids)) ) task_models = task_model_query.all() - - # processor = ProcessInstanceProcessor(process_instance) - # full_bpmn_process_dict = processor.full_bpmn_process_dict - # tasks = full_bpmn_process_dict["tasks"] - # subprocesses = full_bpmn_process_dict["subprocesses"] - # - # steps_by_id = {step_detail.task_id: step_detail for step_detail in step_details} - # - # def 
restore_task(spiff_task: dict[str, Any], step_ended: float) -> None: - # if spiff_task["last_state_change"] > step_ended: - # spiff_task["state"] = Task.task_state_name_to_int("FUTURE") - # spiff_task["data"] = {} - # - # if spiff_step > 0: - # last_change = step_details[-1].end_in_seconds or 0 - # for spiff_task in tasks.values(): - # restore_task(spiff_task, last_change) - # for subprocess in subprocesses.values(): - # for spiff_task in subprocess["tasks"].values(): - # restore_task(spiff_task, last_change) - # - # bpmn_process_instance = ProcessInstanceProcessor._serializer.workflow_from_dict(full_bpmn_process_dict) - # if spiff_step > 0: - # bpmn_process_instance.complete_task_from_id(UUID(step_details[-1].task_id)) - # for subprocess_id, subprocess in bpmn_process_instance.subprocesses.items(): - # if not subprocess.is_completed(): - # task = bpmn_process_instance.get_task(subprocess_id) - # task._set_state(TaskState.WAITING) - - # spiff_tasks = None - # if all_tasks: - # spiff_tasks = bpmn_process_instance.get_tasks(TaskState.ANY_MASK) - # else: - # spiff_tasks = processor.get_all_user_tasks() - # - # ( - # subprocesses_by_child_task_ids, - # task_typename_by_task_id, - # ) = processor.get_subprocesses_by_child_task_ids() - # processor.get_highest_level_calling_subprocesses_by_child_task_ids( - # subprocesses_by_child_task_ids, task_typename_by_task_id - # ) - # - # spiff_tasks_to_process = spiff_tasks - # if most_recent_tasks_only: - # spiff_tasks_by_process_id_and_task_name: dict[str, SpiffTask] = {} - # current_tasks = {} - # for spiff_task in spiff_tasks_to_process: - # row_id = f"{spiff_task.task_spec._wf_spec.name}:{spiff_task.task_spec.name}" - # if spiff_task.state in [TaskState.READY, TaskState.WAITING]: - # current_tasks[row_id] = spiff_task - # if ( - # row_id not in spiff_tasks_by_process_id_and_task_name - # or spiff_task.state > spiff_tasks_by_process_id_and_task_name[row_id].state - # ): - # spiff_tasks_by_process_id_and_task_name[row_id] = spiff_task - # spiff_tasks_by_process_id_and_task_name.update(current_tasks) - # spiff_tasks_to_process = spiff_tasks_by_process_id_and_task_name.values() - # - # response = [] - # for spiff_task in spiff_tasks_to_process: - # task_spiff_step: Optional[int] = None - # if str(spiff_task.id) in steps_by_id: - # task_spiff_step = steps_by_id[str(spiff_task.id)].spiff_step - # calling_subprocess_task_id = subprocesses_by_child_task_ids.get(str(spiff_task.id), None) - # task = ProcessInstanceService.spiff_task_to_api_task( - # processor, - # spiff_task, - # calling_subprocess_task_id=calling_subprocess_task_id, - # task_spiff_step=task_spiff_step, - # ) - # if task.state in ["MAYBE", "LIKELY"]: - # task.state = "FUTURE" - # response.append(task) + # import pdb; pdb.set_trace() + if to_task_guid is not None: + task_models_dict = json.loads(current_app.json.dumps(task_models)) + for task_model in task_models_dict: + if task_model['guid'] == to_task_guid and task_model['state'] == "COMPLETED": + task_model['state'] = "READY" + return make_response(jsonify(task_models_dict), 200) return make_response(jsonify(task_models), 200) diff --git a/spiffworkflow-frontend/src/interfaces.ts b/spiffworkflow-frontend/src/interfaces.ts index 8b61f474..4e65bd02 100644 --- a/spiffworkflow-frontend/src/interfaces.ts +++ b/spiffworkflow-frontend/src/interfaces.ts @@ -21,10 +21,22 @@ export interface RecentProcessModel { processModelDisplayName: string; } +export interface TaskPropertiesJson { + parent: string; +} + export interface 
TaskDefinitionPropertiesJson { spec: string; } +export interface EventDefinition { + typename: string; + payload: any; + event_definitions: [EventDefinition]; + + message_var?: string; +} + export interface Task { id: number; guid: string; @@ -37,12 +49,20 @@ export interface Task { data: any; state: string; typename: string; + properties_json: TaskPropertiesJson; task_definition_properties_json: TaskDefinitionPropertiesJson; + event_definition?: EventDefinition; + // TOOD: DELETE THIS! task_spiff_step?: number; } +export interface TaskIds { + completed: Task[]; + readyOrWaiting: Task[]; +} + export interface ProcessInstanceTask { id: string; task_id: string; diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx index 93231407..fb5b9b3b 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx @@ -42,12 +42,14 @@ import { import ButtonWithConfirmation from '../components/ButtonWithConfirmation'; import { useUriListForPermissions } from '../hooks/UriListForPermissions'; import { + EventDefinition, PermissionsToCheck, ProcessData, ProcessInstance, ProcessInstanceMetadata, Task, TaskDefinitionPropertiesJson, + TaskIds, } from '../interfaces'; import { usePermissionFetcher } from '../hooks/PermissionService'; import ProcessInstanceClass from '../classes/ProcessInstanceClass'; @@ -215,14 +217,14 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { }; const getTaskIds = () => { - const taskIds = { completed: [], readyOrWaiting: [] }; + const taskIds: TaskIds = { completed: [], readyOrWaiting: [] }; if (tasks) { tasks.forEach(function getUserTasksElement(task: Task) { if (task.state === 'COMPLETED') { - (taskIds.completed as any).push(task); + taskIds.completed.push(task); } if (task.state === 'READY' || task.state === 'WAITING') { - (taskIds.readyOrWaiting as any).push(task); + taskIds.readyOrWaiting.push(task); } return null; }); @@ -230,20 +232,14 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { return taskIds; }; - const currentSpiffStep = () => { - if (processInstance && typeof params.to_task_guid === 'undefined') { - return processInstance.spiff_step || 0; - } - - return Number(params.spiff_step); - }; - - const showingFirstSpiffStep = () => { - return currentSpiffStep() === 1; + const currentToTaskGuid = () => { + return params.to_task_guid; }; const showingLastSpiffStep = () => { - return processInstance && currentSpiffStep() === processInstance.spiff_step; + return ( + processInstance && currentToTaskGuid() === processInstance.spiff_step + ); }; const completionViewLink = (label: any, taskGuid: string) => { @@ -278,7 +274,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { const resetProcessInstance = () => { HttpService.makeCallToBackend({ - path: `${targetUris.processInstanceResetPath}/${currentSpiffStep()}`, + path: `${targetUris.processInstanceResetPath}/${currentToTaskGuid()}`, successCallback: returnToLastSpiffStep, httpMethod: 'POST', }); @@ -580,7 +576,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { const getTaskById = (taskId: string) => { if (tasks !== null) { - return tasks.find((task: any) => task.id === taskId); + return tasks.find((task: Task) => task.guid === taskId) || null; } return null; }; @@ -589,24 +585,29 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { console.log('result', result); }; + const 
getParentTaskFromTask = (task: Task) => { + return task.properties_json.parent; + }; + const createScriptUnitTest = () => { if (taskToDisplay) { - const taskToUse: any = taskToDisplay; - const previousTask: any = getTaskById(taskToUse.parent); + const previousTask: Task | null = getTaskById( + getParentTaskFromTask(taskToDisplay) + ); HttpService.makeCallToBackend({ path: `/process-models/${modifiedProcessModelId}/script-unit-tests`, httpMethod: 'POST', successCallback: processScriptUnitTestCreateResult, postBody: { - bpmn_task_identifier: taskToUse.bpmn_identifier, - input_json: previousTask.data, - expected_output_json: taskToUse.data, + bpmn_task_identifier: taskToDisplay.bpmn_identifier, + input_json: previousTask ? previousTask.data : '', + expected_output_json: taskToDisplay.data, }, }); } }; - const isCurrentTask = (task: any) => { + const isCurrentTask = (task: Task) => { const subprocessTypes = [ 'Subprocess', 'Call Activity', @@ -619,7 +620,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { ); }; - const canEditTaskData = (task: any) => { + const canEditTaskData = (task: Task) => { return ( processInstance && ability.can('PUT', targetUris.processInstanceTaskDataPath) && @@ -629,7 +630,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { ); }; - const canSendEvent = (task: any) => { + const canSendEvent = (task: Task) => { // We actually could allow this for any waiting events const taskTypes = ['Event Based Gateway']; return ( @@ -642,7 +643,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { ); }; - const canCompleteTask = (task: any) => { + const canCompleteTask = (task: Task) => { return ( processInstance && processInstance.status === 'suspended' && @@ -652,7 +653,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { ); }; - const canResetProcess = (task: any) => { + const canResetProcess = (task: Task) => { return ( ability.can('POST', targetUris.processInstanceResetPath) && processInstance && @@ -662,8 +663,8 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { ); }; - const getEvents = (task: any) => { - const handleMessage = (eventDefinition: any) => { + const getEvents = (task: Task) => { + const handleMessage = (eventDefinition: EventDefinition) => { if (eventDefinition.typename === 'MessageEventDefinition') { const newEvent = eventDefinition; delete newEvent.message_var; @@ -673,7 +674,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { return eventDefinition; }; if (task.event_definition && task.event_definition.event_definitions) - return task.event_definition.event_definitions.map((e: any) => + return task.event_definition.event_definitions.map((e: EventDefinition) => handleMessage(e) ); if (task.event_definition) return [handleMessage(task.event_definition)]; @@ -710,11 +711,10 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { if (!taskToDisplay) { return; } - console.log('saveTaskData'); removeError(); // taskToUse is copy of taskToDisplay, with taskDataToDisplay in data attribute - const taskToUse: any = { ...taskToDisplay, data: taskDataToDisplay }; + const taskToUse: Task = { ...taskToDisplay, data: taskDataToDisplay }; HttpService.makeCallToBackend({ path: `${targetUris.processInstanceTaskDataPath}/${taskToUse.id}`, httpMethod: 'PUT', @@ -739,13 +739,14 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { }; const completeTask = (execute: boolean) => { - const taskToUse: any = taskToDisplay; - 
HttpService.makeCallToBackend({ - path: `/task-complete/${modifiedProcessModelId}/${params.process_instance_id}/${taskToUse.id}`, - httpMethod: 'POST', - successCallback: returnToLastSpiffStep, - postBody: { execute }, - }); + if (taskToDisplay) { + HttpService.makeCallToBackend({ + path: `/task-complete/${modifiedProcessModelId}/${params.process_instance_id}/${taskToDisplay.guid}`, + httpMethod: 'POST', + successCallback: returnToLastSpiffStep, + postBody: { execute }, + }); + } }; const taskDisplayButtons = (task: Task) => { From 92b0aa96a139ceec901daec403625a2159e6bcbf Mon Sep 17 00:00:00 2001 From: burnettk Date: Tue, 21 Mar 2023 11:29:14 -0400 Subject: [PATCH 075/162] add SPIFFWORKFLOW_BACKEND_SENTRY_ENV_IDENTIFIER config --- spiffworkflow-backend/src/spiffworkflow_backend/__init__.py | 6 +++++- .../src/spiffworkflow_backend/config/default.py | 1 + 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py b/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py index 3d216dc6..68f16ddf 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py @@ -242,12 +242,16 @@ def configure_sentry(app: flask.app.Flask) -> None: if sentry_traces_sample_rate is None: raise Exception("SPIFFWORKFLOW_BACKEND_SENTRY_TRACES_SAMPLE_RATE is not set somehow") + sentry_env_identifier = app.config["ENV_IDENTIFIER"] + if app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_ENV_IDENTIFIER"): + sentry_env_identifier = app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_ENV_IDENTIFIER") + sentry_configs = { "dsn": app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_DSN"), "integrations": [ FlaskIntegration(), ], - "environment": app.config["ENV_IDENTIFIER"], + "environment": sentry_env_identifier, # sample_rate is the errors sample rate. we usually set it to 1 (100%) # so we get all errors in sentry. 
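        # traces_sample_rate, by contrast, only controls what share of
        # transactions is sent for performance tracing, so it is normally
        # kept much lower than sample_rate.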
"sample_rate": float(sentry_errors_sample_rate), diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py index 1805e8af..5c51e294 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py @@ -88,6 +88,7 @@ SPIFFWORKFLOW_BACKEND_SENTRY_ORGANIZATION_SLUG = environ.get( "SPIFFWORKFLOW_BACKEND_SENTRY_ORGANIZATION_SLUG", default=None ) SPIFFWORKFLOW_BACKEND_SENTRY_PROJECT_SLUG = environ.get("SPIFFWORKFLOW_BACKEND_SENTRY_PROJECT_SLUG", default=None) +SPIFFWORKFLOW_BACKEND_SENTRY_ENV_IDENTIFIER = environ.get("SPIFFWORKFLOW_BACKEND_SENTRY_ENV_IDENTIFIER", default=None) SPIFFWORKFLOW_BACKEND_SENTRY_PROFILING_ENABLED = ( environ.get("SPIFFWORKFLOW_BACKEND_SENTRY_PROFILING_ENABLED", default="false") == "true" ) From 63369387c6351edcf1f5d8917ab74e951cad2abc Mon Sep 17 00:00:00 2001 From: jasquat Date: Tue, 21 Mar 2023 13:34:59 -0400 Subject: [PATCH 076/162] pyl --- spiffworkflow-backend/migrations/env.py | 2 - .../src/spiffworkflow_backend/models/task.py | 1 - .../routes/process_instances_controller.py | 44 +++++++++---------- .../routes/tasks_controller.py | 7 +-- .../services/task_service.py | 15 +++++-- .../manual_task_with_subprocesses.bpmn | 2 +- .../unit/test_process_instance_processor.py | 8 +++- .../src/routes/ProcessInstanceShow.tsx | 2 - 8 files changed, 41 insertions(+), 40 deletions(-) diff --git a/spiffworkflow-backend/migrations/env.py b/spiffworkflow-backend/migrations/env.py index 68feded2..630e381a 100644 --- a/spiffworkflow-backend/migrations/env.py +++ b/spiffworkflow-backend/migrations/env.py @@ -1,5 +1,3 @@ -from __future__ import with_statement - import logging from logging.config import fileConfig diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py index a3e182c0..c1e85c57 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py @@ -80,7 +80,6 @@ class Task: HUMAN_TASK_TYPES = ["User Task", "Manual Task"] - def __init__( self, id: str, diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index 43ed6cef..5fa45126 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -1,13 +1,9 @@ """APIs for dealing with process groups, process models, and process instances.""" import base64 -from spiffworkflow_backend.services.task_service import TaskService -from sqlalchemy.orm import aliased -from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel import json from typing import Any from typing import Dict from typing import Optional -from uuid import UUID import flask.wrappers from flask import current_app @@ -16,12 +12,12 @@ from flask import jsonify from flask import make_response from flask import request from flask.wrappers import Response -from SpiffWorkflow.task import Task as SpiffTask # type: ignore -from SpiffWorkflow.task import TaskState from sqlalchemy import and_ from sqlalchemy import or_ +from sqlalchemy.orm import aliased from spiffworkflow_backend.exceptions.api_error import ApiError +from spiffworkflow_backend.models.bpmn_process 
import BpmnProcessModel from spiffworkflow_backend.models.bpmn_process_definition import BpmnProcessDefinitionModel from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.human_task import HumanTaskModel @@ -46,7 +42,6 @@ from spiffworkflow_backend.models.process_model import ProcessModelInfo from spiffworkflow_backend.models.spec_reference import SpecReferenceCache from spiffworkflow_backend.models.spec_reference import SpecReferenceNotFoundError from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel -from spiffworkflow_backend.models.task import Task from spiffworkflow_backend.models.task import TaskModel from spiffworkflow_backend.models.task_definition import TaskDefinitionModel from spiffworkflow_backend.models.user import UserModel @@ -88,6 +83,7 @@ from spiffworkflow_backend.services.process_instance_service import ( ) from spiffworkflow_backend.services.process_model_service import ProcessModelService from spiffworkflow_backend.services.spec_file_service import SpecFileService +from spiffworkflow_backend.services.task_service import TaskService def process_instance_create( @@ -625,9 +621,7 @@ def process_instance_task_list( if to_task_model is None: raise ApiError( error_code="task_not_found", - message=( - f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'" - ), + message=f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'", status_code=400, ) task_model_query = task_model_query.filter(TaskModel.end_in_seconds <= to_task_model.end_in_seconds) @@ -637,13 +631,18 @@ def process_instance_task_list( direct_parent_bpmn_process_definition_alias = aliased(BpmnProcessDefinitionModel) task_model_query = ( - task_model_query.order_by( - TaskModel.id.desc() # type: ignore - ) + task_model_query.order_by(TaskModel.id.desc()) # type: ignore .join(TaskDefinitionModel, TaskDefinitionModel.id == TaskModel.task_definition_id) .join(bpmn_process_alias, bpmn_process_alias.id == TaskModel.bpmn_process_id) - .outerjoin(direct_parent_bpmn_process_alias, direct_parent_bpmn_process_alias.id == bpmn_process_alias.direct_parent_process_id) - .outerjoin(direct_parent_bpmn_process_definition_alias, direct_parent_bpmn_process_definition_alias.id == direct_parent_bpmn_process_alias.bpmn_process_definition_id) + .outerjoin( + direct_parent_bpmn_process_alias, + direct_parent_bpmn_process_alias.id == bpmn_process_alias.direct_parent_process_id, + ) + .outerjoin( + direct_parent_bpmn_process_definition_alias, + direct_parent_bpmn_process_definition_alias.id + == direct_parent_bpmn_process_alias.bpmn_process_definition_id, + ) .join( BpmnProcessDefinitionModel, BpmnProcessDefinitionModel.id == TaskDefinitionModel.bpmn_process_definition_id ) @@ -651,28 +650,27 @@ def process_instance_task_list( BpmnProcessDefinitionModel.bpmn_identifier.label("bpmn_process_definition_identifier"), # type: ignore BpmnProcessDefinitionModel.bpmn_name.label("bpmn_process_definition_name"), # type: ignore direct_parent_bpmn_process_alias.guid.label("bpmn_process_direct_parent_guid"), - direct_parent_bpmn_process_definition_alias.bpmn_identifier.label("bpmn_process_direct_parent_bpmn_identifier"), + direct_parent_bpmn_process_definition_alias.bpmn_identifier.label( + "bpmn_process_direct_parent_bpmn_identifier" + ), TaskDefinitionModel.bpmn_identifier, TaskDefinitionModel.bpmn_name, TaskDefinitionModel.typename, - TaskDefinitionModel.properties_json.label('task_definition_properties_json'), # type: 
ignore + TaskDefinitionModel.properties_json.label("task_definition_properties_json"), # type: ignore TaskModel.guid, TaskModel.state, ) ) if len(bpmn_process_ids) > 0: - task_model_query = ( - task_model_query.filter(bpmn_process_alias.id.in_(bpmn_process_ids)) - ) + task_model_query = task_model_query.filter(bpmn_process_alias.id.in_(bpmn_process_ids)) task_models = task_model_query.all() - # import pdb; pdb.set_trace() if to_task_guid is not None: task_models_dict = json.loads(current_app.json.dumps(task_models)) for task_model in task_models_dict: - if task_model['guid'] == to_task_guid and task_model['state'] == "COMPLETED": - task_model['state'] = "READY" + if task_model["guid"] == to_task_guid and task_model["state"] == "COMPLETED": + task_model["state"] = "READY" return make_response(jsonify(task_models_dict), 200) return make_response(jsonify(task_models), 200) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py index 37c29575..495d22de 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py @@ -1,6 +1,5 @@ """APIs for dealing with process groups, process models, and process instances.""" import json -from spiffworkflow_backend.models.task import TaskModel # noqa: F401 import os import uuid from sys import exc_info @@ -37,8 +36,8 @@ from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus from spiffworkflow_backend.models.process_model import ProcessModelInfo -from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel from spiffworkflow_backend.models.task import Task +from spiffworkflow_backend.models.task import TaskModel # noqa: F401 from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.routes.process_api_blueprint import ( _find_principal_or_raise, @@ -176,9 +175,7 @@ def task_data_show( if task_model is None: raise ApiError( error_code="task_not_found", - message=( - f"Cannot find a task with guid '{task_guid}' for process instance '{process_instance_id}'" - ), + message=f"Cannot find a task with guid '{task_guid}' for process instance '{process_instance_id}'", status_code=400, ) task_model.data = task_model.json_data() diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index fa902406..159a54d8 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -13,7 +13,8 @@ from SpiffWorkflow.task import TaskStateNames from sqlalchemy.dialects.mysql import insert as mysql_insert from sqlalchemy.dialects.postgresql import insert as postgres_insert -from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel, BpmnProcessNotFoundError +from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel +from spiffworkflow_backend.models.bpmn_process import BpmnProcessNotFoundError from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.json_data import JsonDataModel # noqa: F401 from spiffworkflow_backend.models.process_instance import ProcessInstanceModel @@ -204,7 +205,9 @@ class 
TaskService: direct_bpmn_process_parent = top_level_process for subprocess_guid, subprocess in subprocesses.items(): if subprocess == spiff_workflow.outer_workflow: - direct_bpmn_process_parent = BpmnProcessModel.query.filter_by(guid=str(subprocess_guid)).first() + direct_bpmn_process_parent = BpmnProcessModel.query.filter_by( + guid=str(subprocess_guid) + ).first() if direct_bpmn_process_parent is None: raise BpmnProcessNotFoundError( f"Could not find bpmn process with guid: {str(subprocess_guid)} " @@ -212,7 +215,9 @@ class TaskService: ) if direct_bpmn_process_parent is None: - raise BpmnProcessNotFoundError(f"Could not find a direct bpmn process parent for guid: {bpmn_process_guid}") + raise BpmnProcessNotFoundError( + f"Could not find a direct bpmn process parent for guid: {bpmn_process_guid}" + ) bpmn_process.direct_parent_process_id = direct_bpmn_process_parent.id @@ -305,7 +310,9 @@ class TaskService: @classmethod def bpmn_process_and_descendants(cls, bpmn_processes: list[BpmnProcessModel]) -> list[BpmnProcessModel]: bpmn_process_ids = [p.id for p in bpmn_processes] - direct_children = BpmnProcessModel.query.filter(BpmnProcessModel.direct_parent_process_id.in_(bpmn_process_ids)).all() # type: ignore + direct_children = BpmnProcessModel.query.filter( + BpmnProcessModel.direct_parent_process_id.in_(bpmn_process_ids) # type: ignore + ).all() if len(direct_children) > 0: return bpmn_processes + cls.bpmn_process_and_descendants(direct_children) return bpmn_processes diff --git a/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn b/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn index 680903f5..939c8c0b 100644 --- a/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn +++ b/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn @@ -151,4 +151,4 @@ except: - \ No newline at end of file + diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index 0b80a46c..9ca008ec 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -365,7 +365,9 @@ class TestProcessInstanceProcessor(BaseTest): assert len(all_spiff_tasks) > 1 for spiff_task in all_spiff_tasks: assert spiff_task.state == TaskState.COMPLETED - assert_spiff_task_is_in_process("test_process_to_call_subprocess_script", "test_process_to_call_subprocess") + assert_spiff_task_is_in_process( + "test_process_to_call_subprocess_script", "test_process_to_call_subprocess" + ) assert_spiff_task_is_in_process("top_level_subprocess_script", "top_level_subprocess") assert_spiff_task_is_in_process("top_level_script", "top_level_process") @@ -389,7 +391,9 @@ class TestProcessInstanceProcessor(BaseTest): assert bpmn_process_definition is not None assert bpmn_process_definition.bpmn_identifier == "test_process_to_call_subprocess" assert bpmn_process.direct_parent_process_id is not None - direct_parent_process = BpmnProcessModel.query.filter_by(id=bpmn_process.direct_parent_process_id).first() + direct_parent_process = BpmnProcessModel.query.filter_by( + id=bpmn_process.direct_parent_process_id + ).first() assert direct_parent_process is not None assert 
direct_parent_process.bpmn_process_definition.bpmn_identifier == "test_process_to_call" diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx index fb5b9b3b..4fa70d6c 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx @@ -7,12 +7,10 @@ import { useSearchParams, } from 'react-router-dom'; import { - CaretRight, TrashCan, StopOutline, PauseOutline, PlayOutline, - CaretLeft, InProgress, Checkmark, Warning, From 26e65dd51f9e0dcc98eb72443010f924961cb514 Mon Sep 17 00:00:00 2001 From: jasquat Date: Wed, 22 Mar 2023 09:44:13 -0400 Subject: [PATCH 077/162] resetting tasks somewhat work --- .../src/spiffworkflow_backend/api.yml | 24 +--- .../src/spiffworkflow_backend/models/task.py | 2 +- .../routes/process_instances_controller.py | 15 +- .../services/process_instance_processor.py | 128 +++++++++++++----- .../services/task_service.py | 51 +++++++ .../src/routes/ProcessInstanceShow.tsx | 2 +- 6 files changed, 152 insertions(+), 70 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml index 1a21e643..f7fa3f03 100755 --- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml @@ -901,12 +901,6 @@ paths: description: The identifier of the process to use for the diagram. Useful for displaying the diagram for a call activity. schema: type: string - - name: all_tasks - in: query - required: false - description: If true, this wil return all tasks associated with the process instance and not just user tasks. - schema: - type: boolean - name: most_recent_tasks_only in: query required: false @@ -960,12 +954,6 @@ paths: description: The identifier of the process to use for the diagram. Useful for displaying the diagram for a call activity. schema: type: string - - name: all_tasks - in: query - required: false - description: If true, this wil return all tasks associated with the process instance and not just user tasks. - schema: - type: boolean - name: most_recent_tasks_only in: query required: false @@ -1188,7 +1176,7 @@ paths: schema: $ref: "#/components/schemas/OkTrue" - /process-instance-reset/{modified_process_model_identifier}/{process_instance_id}/{spiff_step}: + /process-instance-reset/{modified_process_model_identifier}/{process_instance_id}/{to_task_guid}: parameters: - name: modified_process_model_identifier in: path @@ -1202,12 +1190,12 @@ paths: description: The unique id of an existing process instance. schema: type: integer - - name: spiff_step - in: query - required: false - description: Reset the process to this state + - name: to_task_guid + in: path + required: true + description: Get the tasks only up to the given guid. 
schema: - type: integer + type: string post: operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_reset summary: Reset a process instance to an earlier step diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py index c1e85c57..dbdd429e 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py @@ -63,7 +63,7 @@ class TaskModel(SpiffworkflowBaseDBModel): json_data_hash: str = db.Column(db.String(255), nullable=False, index=True) python_env_data_hash: str = db.Column(db.String(255), nullable=False, index=True) - start_in_seconds: float = db.Column(db.DECIMAL(17, 6)) + start_in_seconds: Union[float, None] = db.Column(db.DECIMAL(17, 6)) end_in_seconds: Union[float, None] = db.Column(db.DECIMAL(17, 6)) data: Optional[dict] = None diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index 5fa45126..b0cde36f 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -555,8 +555,6 @@ def process_instance_report_show( def process_instance_task_list_without_task_data_for_me( modified_process_model_identifier: str, process_instance_id: int, - all_tasks: bool = False, - spiff_step: int = 0, most_recent_tasks_only: bool = False, bpmn_process_guid: Optional[str] = None, to_task_guid: Optional[str] = None, @@ -566,8 +564,6 @@ def process_instance_task_list_without_task_data_for_me( return process_instance_task_list( _modified_process_model_identifier=modified_process_model_identifier, process_instance=process_instance, - all_tasks=all_tasks, - spiff_step=spiff_step, most_recent_tasks_only=most_recent_tasks_only, bpmn_process_guid=bpmn_process_guid, to_task_guid=to_task_guid, @@ -577,8 +573,6 @@ def process_instance_task_list_without_task_data_for_me( def process_instance_task_list_without_task_data( modified_process_model_identifier: str, process_instance_id: int, - all_tasks: bool = False, - spiff_step: int = 0, most_recent_tasks_only: bool = False, bpmn_process_guid: Optional[str] = None, to_task_guid: Optional[str] = None, @@ -588,8 +582,6 @@ def process_instance_task_list_without_task_data( return process_instance_task_list( _modified_process_model_identifier=modified_process_model_identifier, process_instance=process_instance, - all_tasks=all_tasks, - spiff_step=spiff_step, most_recent_tasks_only=most_recent_tasks_only, bpmn_process_guid=bpmn_process_guid, to_task_guid=to_task_guid, @@ -600,8 +592,6 @@ def process_instance_task_list( _modified_process_model_identifier: str, process_instance: ProcessInstanceModel, bpmn_process_guid: Optional[str] = None, - all_tasks: bool = False, - spiff_step: int = 0, to_task_guid: Optional[str] = None, most_recent_tasks_only: bool = False, ) -> flask.wrappers.Response: @@ -679,12 +669,11 @@ def process_instance_task_list( def process_instance_reset( process_instance_id: int, modified_process_model_identifier: str, - spiff_step: int = 0, + to_task_guid: str, ) -> flask.wrappers.Response: """Reset a process instance to a particular step.""" process_instance = _find_process_instance_by_id_or_raise(process_instance_id) - processor = ProcessInstanceProcessor(process_instance) - 
processor.reset_process(spiff_step) + ProcessInstanceProcessor.reset_process(process_instance, to_task_guid, commit=True) return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index fdd42cb9..ec741f32 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -52,6 +52,8 @@ from SpiffWorkflow.spiff.serializer.config import SPIFF_SPEC_CONFIG # type: ign from SpiffWorkflow.task import Task as SpiffTask # type: ignore from SpiffWorkflow.task import TaskState from SpiffWorkflow.util.deep_merge import DeepMerge # type: ignore +from sqlalchemy import and_ +from sqlalchemy import or_ from spiffworkflow_backend.exceptions.api_error import ApiError from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel @@ -85,7 +87,8 @@ from spiffworkflow_backend.models.script_attributes_context import ( ) from spiffworkflow_backend.models.spec_reference import SpecReferenceCache from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel -from spiffworkflow_backend.models.task import TaskModel # noqa: F401 +from spiffworkflow_backend.models.task import TaskModel +from spiffworkflow_backend.models.task import TaskNotFoundError from spiffworkflow_backend.models.task_definition import TaskDefinitionModel from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.scripts.script import Script @@ -154,10 +157,6 @@ class SpiffStepDetailIsMissingError(Exception): pass -class TaskNotFoundError(Exception): - pass - - class BoxedTaskDataBasedScriptEngineEnvironment(BoxedTaskDataEnvironment): # type: ignore def __init__(self, environment_globals: Dict[str, Any]): """BoxedTaskDataBasedScriptEngineEnvironment.""" @@ -1312,48 +1311,103 @@ class ProcessInstanceProcessor: # Saving the workflow seems to reset the status self.suspend() - def reset_process(self, spiff_step: int) -> None: + @classmethod + def reset_process( + cls, process_instance: ProcessInstanceModel, to_task_guid: str, commit: Optional[bool] = False + ) -> None: """Reset a process to an earlier state.""" - spiff_logger = logging.getLogger("spiff") - spiff_logger.info( - f"Process reset from step {spiff_step}", - extra=self.bpmn_process_instance.log_info(), + cls.add_event_to_process_instance( + process_instance, ProcessInstanceEventType.process_instance_rewound_to_task.value, task_guid=to_task_guid ) - step_detail = ( - db.session.query(SpiffStepDetailsModel) - .filter( - SpiffStepDetailsModel.process_instance_id == self.process_instance_model.id, - SpiffStepDetailsModel.spiff_step == spiff_step, + to_task_model = TaskModel.query.filter_by(guid=to_task_guid, process_instance_id=process_instance.id).first() + if to_task_model is None: + raise TaskNotFoundError( + f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'" ) - .first() + + parent_bpmn_processes, task_models_of_parent_bpmn_processes = TaskService.task_models_of_parent_bpmn_processes( + to_task_model ) - if step_detail is not None: - self.increment_spiff_step() - self.add_step( - { - "process_instance_id": self.process_instance_model.id, - "spiff_step": self.process_instance_model.spiff_step or 1, - "task_json": step_detail.task_json, - "timestamp": 
round(time.time()), - } + task_models_of_parent_bpmn_processes_guids = [p.guid for p in task_models_of_parent_bpmn_processes if p.guid] + parent_bpmn_processes_ids = [p.id for p in parent_bpmn_processes] + tasks_to_update_query = db.session.query(TaskModel).filter( + and_( + or_( + TaskModel.end_in_seconds > to_task_model.end_in_seconds, + TaskModel.end_in_seconds.is_not(None), # type: ignore + ), + TaskModel.process_instance_id == process_instance.id, + TaskModel.bpmn_process_id.in_(parent_bpmn_processes_ids), # type: ignore + ) + ) + tasks_to_update = tasks_to_update_query.all() + + # run all queries before making changes to task_model + if commit: + tasks_to_delete_query = db.session.query(TaskModel).filter( + and_( + or_( + TaskModel.end_in_seconds > to_task_model.end_in_seconds, + TaskModel.end_in_seconds.is_not(None), # type: ignore + ), + TaskModel.process_instance_id == process_instance.id, + TaskModel.guid.not_in(task_models_of_parent_bpmn_processes_guids), # type: ignore + TaskModel.bpmn_process_id.not_in(parent_bpmn_processes_ids), # type: ignore + ) ) - dct = self._serializer.workflow_to_dict(self.bpmn_process_instance) - dct["tasks"] = step_detail.task_json["tasks"] - dct["subprocesses"] = step_detail.task_json["subprocesses"] - self.bpmn_process_instance = self._serializer.workflow_from_dict(dct) + tasks_to_delete = tasks_to_delete_query.all() - # Cascade does not seems to work on filters, only directly through the session - tasks = self.bpmn_process_instance.get_tasks(TaskState.NOT_FINISHED_MASK) - rows = HumanTaskModel.query.filter( - HumanTaskModel.task_id.in_(str(t.id) for t in tasks) # type: ignore + # delete any later tasks from to_task_model and delete bpmn processes that may be + # link directly to one of those tasks. + tasks_to_delete_guids = [t.guid for t in tasks_to_delete] + tasks_to_delete_ids = [t.id for t in tasks_to_delete] + bpmn_processes_to_delete = BpmnProcessModel.query.filter( + BpmnProcessModel.guid.in_(tasks_to_delete_guids) # type: ignore + ).all() + human_tasks_to_delete = HumanTaskModel.query.filter( + HumanTaskModel.task_model_id.in_(tasks_to_delete_ids) # type: ignore ).all() - for row in rows: - db.session.delete(row) - self.save() - self.suspend() + # ensure the correct order for foreign keys + for human_task_to_delete in human_tasks_to_delete: + db.session.delete(human_task_to_delete) + db.session.commit() + for task_to_delete in tasks_to_delete: + db.session.delete(task_to_delete) + db.session.commit() + for bpmn_process_to_delete in bpmn_processes_to_delete: + db.session.delete(bpmn_process_to_delete) + db.session.commit() + + related_human_task = HumanTaskModel.query.filter_by(task_model_id=to_task_model.id).first() + if related_human_task is not None: + db.session.delete(related_human_task) + + for task_to_update in tasks_to_update: + TaskService.reset_task_model(task_to_update, state="FUTURE", commit=commit) + + parent_task_model = TaskModel.query.filter_by(guid=to_task_model.properties_json["parent"]).first() + if parent_task_model is None: + raise TaskNotFoundError( + f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'" + ) + + TaskService.reset_task_model( + to_task_model, + state="READY", + json_data_hash=parent_task_model.json_data_hash, + python_env_data_hash=parent_task_model.python_env_data_hash, + commit=commit, + ) + for task_model in task_models_of_parent_bpmn_processes: + TaskService.reset_task_model(task_model, state="WAITING", commit=commit) + + if commit: + processor = 
ProcessInstanceProcessor(process_instance) + processor.save() + processor.suspend() @staticmethod def get_parser() -> MyCustomParser: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index 159a54d8..918de4d8 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -9,6 +9,7 @@ from flask import current_app from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflow # type: ignore from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer from SpiffWorkflow.task import Task as SpiffTask # type: ignore +from SpiffWorkflow.task import TaskState from SpiffWorkflow.task import TaskStateNames from sqlalchemy.dialects.mysql import insert as mysql_insert from sqlalchemy.dialects.postgresql import insert as postgres_insert @@ -317,6 +318,56 @@ class TaskService: return bpmn_processes + cls.bpmn_process_and_descendants(direct_children) return bpmn_processes + @classmethod + def task_models_of_parent_bpmn_processes( + cls, task_model: TaskModel + ) -> Tuple[list[BpmnProcessModel], list[TaskModel]]: + bpmn_process = task_model.bpmn_process + task_models: list[TaskModel] = [] + bpmn_processes: list[BpmnProcessModel] = [bpmn_process] + if bpmn_process.guid is not None: + parent_task_model = TaskModel.query.filter_by(guid=bpmn_process.guid).first() + if parent_task_model is not None: + b, t = cls.task_models_of_parent_bpmn_processes(parent_task_model) + return (bpmn_processes + b, [parent_task_model] + t) + return (bpmn_processes, task_models) + + @classmethod + def reset_task_model( + cls, + task_model: TaskModel, + state: str, + commit: Optional[bool] = True, + json_data_hash: Optional[str] = None, + python_env_data_hash: Optional[str] = None, + ) -> None: + if json_data_hash is None: + TaskService.update_task_data_on_task_model(task_model, {}, "json_data_hash") + else: + task_model.json_data_hash = json_data_hash + if python_env_data_hash is None: + TaskService.update_task_data_on_task_model(task_model, {}, "python_env_data") + else: + task_model.python_env_data_hash = python_env_data_hash + + new_properties_json = task_model.properties_json + task_model.state = state + task_model.start_in_seconds = None + task_model.end_in_seconds = None + + if commit: + db.session.add(task_model) + db.session.commit() + + new_properties_json["state"] = getattr(TaskState, state) + task_model.properties_json = new_properties_json + + if commit: + # if we commit the properties json at the same time as the other items + # the json gets reset for some reason. 
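+            # (the first commit above already persisted the cleared state and
+            # timestamps, so this second commit only has to persist properties_json)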
+ db.session.add(task_model) + db.session.commit() + @classmethod def _create_task( cls, diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx index 4fa70d6c..74e6e1a8 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx @@ -144,7 +144,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { path: `${apiPath}/${modifiedProcessModelId}/${params.process_instance_id}${queryParams}`, successCallback: setProcessInstance, }); - let taskParams = '?all_tasks=true&most_recent_tasks_only=true'; + let taskParams = '?most_recent_tasks_only=true'; if (typeof params.to_task_guid !== 'undefined') { taskParams = `${taskParams}&to_task_guid=${params.to_task_guid}`; } From c63483b171d58932b0ef4a00616690314a0e2086 Mon Sep 17 00:00:00 2001 From: jasquat Date: Wed, 22 Mar 2023 10:45:40 -0400 Subject: [PATCH 078/162] updated manual task with subprocess bpmn w/ burnettk --- .../services/process_instance_processor.py | 2 +- .../services/task_service.py | 4 +- .../manual_task_with_subprocesses.bpmn | 14 +-- .../test_process_to_call.bpmn | 115 ++++++++++++++---- .../unit/test_process_instance_processor.py | 62 +++++++++- 5 files changed, 154 insertions(+), 43 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index ec741f32..c97fa733 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -1335,7 +1335,7 @@ class ProcessInstanceProcessor: and_( or_( TaskModel.end_in_seconds > to_task_model.end_in_seconds, - TaskModel.end_in_seconds.is_not(None), # type: ignore + TaskModel.end_in_seconds.is_(None), # type: ignore ), TaskModel.process_instance_id == process_instance.id, TaskModel.bpmn_process_id.in_(parent_bpmn_processes_ids), # type: ignore diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index 918de4d8..f75d955c 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -342,11 +342,11 @@ class TaskService: python_env_data_hash: Optional[str] = None, ) -> None: if json_data_hash is None: - TaskService.update_task_data_on_task_model(task_model, {}, "json_data_hash") + cls.update_task_data_on_task_model(task_model, {}, "json_data_hash") else: task_model.json_data_hash = json_data_hash if python_env_data_hash is None: - TaskService.update_task_data_on_task_model(task_model, {}, "python_env_data") + cls.update_task_data_on_task_model(task_model, {}, "python_env_data") else: task_model.python_env_data_hash = python_env_data_hash diff --git a/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn b/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn index 939c8c0b..f49f99cd 100644 --- a/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn +++ b/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn @@ -22,11 +22,10 @@ set_in_top_level_script = 1 - Flow_09gjylo - 
Flow_1i7syph + Flow_0yxus36 Flow_00k1tii @@ -48,7 +47,7 @@ except: - Flow_1i7syph + Flow_0yxus36 Flow_187mcqe @@ -67,6 +66,7 @@ except: set_top_level_process_script_after_gate = 1 + @@ -102,10 +102,6 @@ except: - - - - @@ -128,6 +124,10 @@ except: + + + + diff --git a/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/test_process_to_call.bpmn b/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/test_process_to_call.bpmn index afda130a..2bdce678 100644 --- a/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/test_process_to_call.bpmn +++ b/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/test_process_to_call.bpmn @@ -2,70 +2,131 @@ - Flow_095sred + Flow_089aeua Flow_1qsx5et Flow_1qsx5et - Flow_095sred + Flow_0zedtvv - Flow_12zb3j0 + Flow_0bkk554 - Flow_12zb3j0 - Flow_0iu4d71 - set_in_test_process_to_call_script = 1 + Flow_1cnuh2a + Flow_17hgw9g + set_in_test_process_to_call_subprocess_script = 1 - Flow_0iu4d71 + Flow_17hgw9g - - + + + + + Flow_0bkk554 + Flow_1cnuh2a + + Flow_1nri60d + + + + Flow_1bfzrzu + + + + Flow_1nri60d + Flow_1bfzrzu + set_in_test_process_to_call_subprocess_subprocess_script = 1 + + - + + + + Flow_0zedtvv + Flow_089aeua + set_in_test_process_to_call_script = 1 + + + + - - - + + + + - + - + + + + + - - - - - - - - - + + + + + + + + + + + + - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index 9ca008ec..d1f5da24 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -256,6 +256,54 @@ class TestProcessInstanceProcessor(BaseTest): assert spiff_task is not None assert spiff_task.state == TaskState.COMPLETED + def test_properly_resets_process_to_given_task( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + initiator_user = self.find_or_create_user("initiator_user") + finance_user_three = self.find_or_create_user("testuser3") + assert initiator_user.principal is not None + assert finance_user_three.principal is not None + AuthorizationService.import_permissions_from_yaml_file() + + finance_group = GroupModel.query.filter_by(identifier="Finance Team").first() + assert finance_group is not None + + process_model = load_test_spec( + process_model_id="test_group/manual_task_with_subprocesses", + process_model_source_directory="manual_task_with_subprocesses", + ) + process_instance = self.create_process_instance_from_process_model( + process_model=process_model, user=initiator_user + ) + processor = ProcessInstanceProcessor(process_instance) + processor.do_engine_steps(save=True) + assert len(process_instance.active_human_tasks) == 1 + initial_human_task_id = process_instance.active_human_tasks[0].id + + # save again to ensure we go attempt to process the human tasks again + processor.save() + + assert len(process_instance.active_human_tasks) == 1 + assert initial_human_task_id == process_instance.active_human_tasks[0].id + + processor = ProcessInstanceProcessor(process_instance) + human_task_one = process_instance.active_human_tasks[0] + spiff_manual_task = processor.__class__.get_task_by_bpmn_identifier( + 
human_task_one.task_name, processor.bpmn_process_instance + ) + ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) + + # process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() + # processor = ProcessInstanceProcessor(process_instance) + # human_task_one = process_instance.active_human_tasks[0] + # spiff_manual_task = processor.bpmn_process_instance.get_task(UUID(human_task_one.task_id)) + # ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) + def test_properly_saves_tasks_when_running( self, app: Flask, @@ -263,7 +311,6 @@ class TestProcessInstanceProcessor(BaseTest): with_db_and_bpmn_file_cleanup: None, with_super_admin_user: UserModel, ) -> None: - """Test_does_not_recreate_human_tasks_on_multiple_saves.""" self.create_process_group(client, with_super_admin_user, "test_group", "test_group") initiator_user = self.find_or_create_user("initiator_user") finance_user_three = self.find_or_create_user("testuser3") @@ -317,7 +364,7 @@ class TestProcessInstanceProcessor(BaseTest): } third_data_set = { **second_data_set, - **{"set_in_test_process_to_call_script": 1}, + **{"set_in_test_process_to_call_script": 1, "set_in_test_process_to_call_subprocess_subprocess_script": 1, "set_in_test_process_to_call_subprocess_script": 1}, } fourth_data_set = {**third_data_set, **{"a": 1, "we_move_on": True}} fifth_data_set = {**fourth_data_set, **{"validate_only": False, "set_top_level_process_script_after_gate": 1}} @@ -338,10 +385,13 @@ class TestProcessInstanceProcessor(BaseTest): # TODO: also check task data here from the spiff_task directly to ensure we hydrated spiff correctly def assert_spiff_task_is_in_process(spiff_task_identifier: str, bpmn_process_identifier: str) -> None: if spiff_task.task_spec.name == spiff_task_identifier: - base_failure_message = f"Failed on {bpmn_process_identifier} - {spiff_task_identifier}." - expected_python_env_data = expected_task_data[spiff_task.task_spec.name] + expected_task_data_key = spiff_task.task_spec.name if spiff_task.task_spec.name in spiff_tasks_checked_once: - expected_python_env_data = expected_task_data[f"{spiff_task.task_spec.name}_second"] + expected_task_data_key = f"{spiff_task.task_spec.name}_second" + + expected_python_env_data = expected_task_data[expected_task_data_key] + + base_failure_message = f"Failed on {bpmn_process_identifier} - {spiff_task_identifier} - task data key {expected_task_data_key}." task_model = TaskModel.query.filter_by(guid=str(spiff_task.id)).first() assert task_model.start_in_seconds is not None @@ -354,7 +404,7 @@ class TestProcessInstanceProcessor(BaseTest): assert task_definition.bpmn_process_definition.bpmn_identifier == bpmn_process_identifier message = ( - f"{base_failure_message} Expected: {expected_python_env_data}. Received: {task_model.json_data()}" + f"{base_failure_message} Expected: {sorted(expected_python_env_data)}. 
Received: {sorted(task_model.json_data())}" ) # TODO: if we split out env data again we will need to use it here instead of json_data # assert task_model.python_env_data() == expected_python_env_data, message From 2e4b0a507b4b2880c0a150cd750585e3bef593b3 Mon Sep 17 00:00:00 2001 From: jasquat Date: Wed, 22 Mar 2023 12:22:27 -0400 Subject: [PATCH 079/162] do not fail if cannot get a token for a user in login w/ burnettk --- spiffworkflow-backend/bin/login_with_users | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spiffworkflow-backend/bin/login_with_users b/spiffworkflow-backend/bin/login_with_users index 3d73e036..167c57f9 100755 --- a/spiffworkflow-backend/bin/login_with_users +++ b/spiffworkflow-backend/bin/login_with_users @@ -28,7 +28,7 @@ REALM_NAME=${2-spiffworkflow} while read -r input_line; do if ! grep -qE '(^#|email)' <<<"$input_line" ; then username=$(awk -F '@' '{print $1}' <<<"$input_line") - access_token=$("${script_dir}/get_token" "$username" "$username" "$REALM_NAME") + access_token=$("${script_dir}/get_token" "$username" "$username" "$REALM_NAME" || echo '') if [[ -z "$access_token" || "$access_token" == "null" ]]; then >&2 echo "ERROR: failed to get access token for '$username'" else From 63acf11b5d7ed4b05c0383276677d48dfb59bdf5 Mon Sep 17 00:00:00 2001 From: jasquat Date: Wed, 22 Mar 2023 14:39:04 -0400 Subject: [PATCH 080/162] some more debugging for resetting a process to specific task w/ burnettk --- .../services/process_instance_processor.py | 81 +++++++++++-------- .../unit/test_process_instance_processor.py | 20 +++-- 2 files changed, 61 insertions(+), 40 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index c97fa733..849a0ee5 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -1338,53 +1338,64 @@ class ProcessInstanceProcessor: TaskModel.end_in_seconds.is_(None), # type: ignore ), TaskModel.process_instance_id == process_instance.id, - TaskModel.bpmn_process_id.in_(parent_bpmn_processes_ids), # type: ignore + # TaskModel.bpmn_process_id.in_(parent_bpmn_processes_ids), # type: ignore ) ) tasks_to_update = tasks_to_update_query.all() # run all queries before making changes to task_model if commit: - tasks_to_delete_query = db.session.query(TaskModel).filter( - and_( - or_( - TaskModel.end_in_seconds > to_task_model.end_in_seconds, - TaskModel.end_in_seconds.is_not(None), # type: ignore - ), - TaskModel.process_instance_id == process_instance.id, - TaskModel.guid.not_in(task_models_of_parent_bpmn_processes_guids), # type: ignore - TaskModel.bpmn_process_id.not_in(parent_bpmn_processes_ids), # type: ignore - ) - ) + # tasks_to_delete_query = db.session.query(TaskModel).filter( + # and_( + # or_( + # TaskModel.end_in_seconds > to_task_model.end_in_seconds, + # TaskModel.end_in_seconds.is_not(None), # type: ignore + # ), + # TaskModel.process_instance_id == process_instance.id, + # TaskModel.guid.not_in(task_models_of_parent_bpmn_processes_guids), # type: ignore + # TaskModel.bpmn_process_id.not_in(parent_bpmn_processes_ids), # type: ignore + # ) + # ) + # + # tasks_to_delete = tasks_to_delete_query.all() + # + # # delete any later tasks from to_task_model and delete bpmn processes that may be + # # link directly to one of those tasks. 
+ # tasks_to_delete_guids = [t.guid for t in tasks_to_delete] + # tasks_to_delete_ids = [t.id for t in tasks_to_delete] + # bpmn_processes_to_delete = BpmnProcessModel.query.filter( + # BpmnProcessModel.guid.in_(tasks_to_delete_guids) # type: ignore + # ).order_by(BpmnProcessModel.id.desc()).all() + # human_tasks_to_delete = HumanTaskModel.query.filter( + # HumanTaskModel.task_model_id.in_(tasks_to_delete_ids) # type: ignore + # ).all() + # + # + # import pdb; pdb.set_trace() + # # ensure the correct order for foreign keys + # for human_task_to_delete in human_tasks_to_delete: + # db.session.delete(human_task_to_delete) + # db.session.commit() + # for task_to_delete in tasks_to_delete: + # db.session.delete(task_to_delete) + # db.session.commit() + # for bpmn_process_to_delete in bpmn_processes_to_delete: + # db.session.delete(bpmn_process_to_delete) + # db.session.commit() - tasks_to_delete = tasks_to_delete_query.all() - - # delete any later tasks from to_task_model and delete bpmn processes that may be - # link directly to one of those tasks. - tasks_to_delete_guids = [t.guid for t in tasks_to_delete] - tasks_to_delete_ids = [t.id for t in tasks_to_delete] - bpmn_processes_to_delete = BpmnProcessModel.query.filter( - BpmnProcessModel.guid.in_(tasks_to_delete_guids) # type: ignore - ).all() - human_tasks_to_delete = HumanTaskModel.query.filter( - HumanTaskModel.task_model_id.in_(tasks_to_delete_ids) # type: ignore - ).all() - - # ensure the correct order for foreign keys - for human_task_to_delete in human_tasks_to_delete: - db.session.delete(human_task_to_delete) - db.session.commit() - for task_to_delete in tasks_to_delete: - db.session.delete(task_to_delete) - db.session.commit() - for bpmn_process_to_delete in bpmn_processes_to_delete: - db.session.delete(bpmn_process_to_delete) - db.session.commit() related_human_task = HumanTaskModel.query.filter_by(task_model_id=to_task_model.id).first() if related_human_task is not None: db.session.delete(related_human_task) + tasks_to_update_ids = [t.id for t in tasks_to_update] + human_tasks_to_delete = HumanTaskModel.query.filter( + HumanTaskModel.task_model_id.in_(tasks_to_update_ids) # type: ignore + ).all() + for human_task_to_delete in human_tasks_to_delete: + db.session.delete(human_task_to_delete) + db.session.commit() + for task_to_update in tasks_to_update: TaskService.reset_task_model(task_to_update, state="FUTURE", commit=commit) diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index d1f5da24..9b447f34 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -298,11 +298,21 @@ class TestProcessInstanceProcessor(BaseTest): ) ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) - # process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() - # processor = ProcessInstanceProcessor(process_instance) - # human_task_one = process_instance.active_human_tasks[0] - # spiff_manual_task = processor.bpmn_process_instance.get_task(UUID(human_task_one.task_id)) - # ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) + processor.suspend() + ProcessInstanceProcessor.reset_process(process_instance, str(spiff_manual_task.id), 
commit=True)
+
+        process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first()
+        processor = ProcessInstanceProcessor(process_instance)
+        human_task_one = process_instance.active_human_tasks[0]
+        spiff_manual_task = processor.bpmn_process_instance.get_task(UUID(human_task_one.task_id))
+        import pdb; pdb.set_trace()
+        ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one)
+        import pdb; pdb.set_trace()
+        human_task_one = process_instance.active_human_tasks[0]
+        spiff_manual_task = processor.bpmn_process_instance.get_task(UUID(human_task_one.task_id))
+        ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one)
+
+        import pdb; pdb.set_trace()

     def test_properly_saves_tasks_when_running(

From 1a84f4802fc182d1eba0269a4e25b739827f2066 Mon Sep 17 00:00:00 2001
From: jasquat
Date: Wed, 22 Mar 2023 15:52:00 -0400
Subject: [PATCH 081/162] added script to create and run a process model w/
 burnettk

---
 .../bin/run_process_model_with_api | 59 +++++++++++++++++++
 1 file changed, 59 insertions(+)
 create mode 100755 spiffworkflow-backend/bin/run_process_model_with_api

diff --git a/spiffworkflow-backend/bin/run_process_model_with_api b/spiffworkflow-backend/bin/run_process_model_with_api
new file mode 100755
index 00000000..f8b3a6c7
--- /dev/null
+++ b/spiffworkflow-backend/bin/run_process_model_with_api
@@ -0,0 +1,59 @@
+#!/usr/bin/env bash
+
+function error_handler() {
+  >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
+  exit "$2"
+}
+trap 'error_handler ${LINENO} $?' ERR
+set -o errtrace -o errexit -o nounset -o pipefail
+
+script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
+
+if [[ -z "${KEYCLOAK_BASE_URL:-}" ]]; then
+  export KEYCLOAK_BASE_URL=https://keycloak.dev.spiffworkflow.org
+fi
+if [[ -z "${BACKEND_BASE_URL:-}" ]]; then
+  export BACKEND_BASE_URL=https://api.dev.spiffworkflow.org
+fi
+
+process_model_identifier="${1:-}"
+username="${2:-admin}"
+password="${3:-admin}"
+realm_name="${4:-spiffworkflow}"
+if [[ -z "${1:-}" ]]; then
+  >&2 echo "usage: $(basename "$0") [process_model_identifier] [username: OPTIONAL] [password: OPTIONAL] [realm_name: OPTIONAL]"
+  exit 1
+fi
+
+modified_process_model_identifier=$(tr '/' ':' <<<"$process_model_identifier")
+
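+# process_next_task: as long as the backend keeps returning a ready Manual
+# Task, complete it via the tasks API and recurse; any other task is printed
+# so it can be completed manually.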
+function process_next_task() {
+  local next_task="$1"
+
+  if [[ -n "$next_task" && "$next_task" != "null" ]]; then
+    task_type=$(jq -r '.type' <<<"$next_task")
+    task_state=$(jq -r '.state' <<<"$next_task")
+    task_guid=$(jq -r '.id' <<<"$next_task")
+
+    if grep -qE "Manual ?Task" <<<"$task_type" && [[ "${task_state}" == "READY" ]]; then
+      next_task=$(curl --silent -X PUT "${BACKEND_BASE_URL}/v1.0/tasks/${process_instance_id}/${task_guid}" -H "Authorization: Bearer $access_token")
+      process_next_task "$next_task"
+    elif [[ "$(jq '.ok' <<<"$next_task")" == "null" ]]; then
+      echo -e "\n\nThe next task is not a Manual Task and requires user input. It must be completed manually."
+      echo "$next_task"
+    fi
+  fi
+}
+
+access_token=$("${script_dir}/get_token" "$username" "$password" "$realm_name")
+curl --silent -X POST "${BACKEND_BASE_URL}/v1.0/login_with_access_token?access_token=${access_token}" -H "Authorization: Bearer $access_token" >/dev/null
+result=$(curl --silent -X POST "${BACKEND_BASE_URL}/v1.0/process-instances/${modified_process_model_identifier}" -H "Authorization: Bearer $access_token")
+process_instance_id=$(jq '.id' <<<"$result")
+if ! grep -qE '^[0-9]+$' <<<"$process_instance_id"; then
+  >&2 echo "ERROR: Did not receive valid process instance id when instantiating process model. result was ${result}"
+  exit 1
+fi
+
+result=$(curl --silent -X POST "${BACKEND_BASE_URL}/v1.0/process-instances/${modified_process_model_identifier}/${process_instance_id}/run" -H "Authorization: Bearer $access_token")
+next_task=$(jq '.next_task' <<<"$result")
+process_next_task "$next_task"

From 17dd287541f113305e61205ae30421cf2329cfde Mon Sep 17 00:00:00 2001
From: jasquat
Date: Thu, 23 Mar 2023 07:41:59 -0400
Subject: [PATCH 082/162] added check result function when running a process
 model from the shell script

---
 spiffworkflow-backend/bin/run_process_model_with_api | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/spiffworkflow-backend/bin/run_process_model_with_api b/spiffworkflow-backend/bin/run_process_model_with_api
index f8b3a6c7..c62e43ad 100755
--- a/spiffworkflow-backend/bin/run_process_model_with_api
+++ b/spiffworkflow-backend/bin/run_process_model_with_api
@@ -27,6 +27,15 @@ fi

 modified_process_model_identifier=$(tr '/' ':' <<<"$process_model_identifier")

+function check_result_for_error() {
+  local result="$1"
+  error_code=$(jq '.error_code' <<<"$result")
+  if [[ -n "$error_code" && "$error_code" != "null" ]]; then
+    >&2 echo "ERROR: Failed to run process instance. Received error: $result"
+    exit 1
+  fi
+}
+
 function process_next_task() {
   local next_task="$1"

@@ -37,6 +46,7 @@ function process_next_task() {
     if grep -qE "Manual ?Task" <<<"$task_type" && [[ "${task_state}" == "READY" ]]; then
       next_task=$(curl --silent -X PUT "${BACKEND_BASE_URL}/v1.0/tasks/${process_instance_id}/${task_guid}" -H "Authorization: Bearer $access_token")
+      check_result_for_error "$next_task"
       process_next_task "$next_task"
     elif [[ "$(jq '.ok' <<<"$next_task")" == "null" ]]; then
       echo -e "\n\nThe next task is not a Manual Task and requires user input. It must be completed manually."
       echo "$next_task"
@@ -55,5 +65,6 @@ if ! grep -qE '^[0-9]+$' <<<"$process_instance_id"; then
 fi

 result=$(curl --silent -X POST "${BACKEND_BASE_URL}/v1.0/process-instances/${modified_process_model_identifier}/${process_instance_id}/run" -H "Authorization: Bearer $access_token")
+check_result_for_error "$result"
 next_task=$(jq '.next_task' <<<"$result")
 process_next_task "$next_task"
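For reference, a rough invocation of the script built up by these two patches might look like the lines below, run from the spiffworkflow-backend directory. The process model identifier, credentials, and local URLs are illustrative placeholders, not values taken from this repository:

    export BACKEND_BASE_URL=http://localhost:7000
    export KEYCLOAK_BASE_URL=http://localhost:7002
    ./bin/run_process_model_with_api misc/example-group/example-model admin admin spiffworkflow

With those arguments the script exchanges the username and password for a Keycloak token, creates and runs an instance of the model (the identifier is sent to the API as misc:example-group:example-model), and keeps completing ready Manual Tasks until it reaches a task that needs real user input or an error payload, which check_result_for_error now reports.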
grep -qE '^[0-9]+$' <<<"$process_instance_id"; then fi result=$(curl --silent -X POST "${BACKEND_BASE_URL}/v1.0/process-instances/${modified_process_model_identifier}/${process_instance_id}/run" -H "Authorization: Bearer $access_token") +check_result_for_error "$result" next_task=$(jq '.next_task' <<<"$result") process_next_task "$next_task" From 00a0e901f105b9c204943ae54e37c9241aee1e52 Mon Sep 17 00:00:00 2001 From: jasquat Date: Thu, 23 Mar 2023 10:44:09 -0400 Subject: [PATCH 083/162] commented out reset process code and added comment and raise until we get it actually working and fixed issue with viewing at completed task where it was not including the tasks for the parent bpmn processes --- .../models/bpmn_process.py | 3 + .../models/bpmn_process_definition.py | 3 + .../bpmn_process_definition_relationship.py | 3 + .../models/task_definition.py | 3 + .../routes/process_instances_controller.py | 41 +++- .../services/process_instance_processor.py | 212 +++++++++--------- .../services/task_service.py | 11 + .../services/workflow_execution_service.py | 2 + .../unit/test_process_instance_processor.py | 125 ++++++----- 9 files changed, 234 insertions(+), 169 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py index c38fed7b..d5ba53df 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py @@ -1,5 +1,7 @@ from __future__ import annotations +from dataclasses import dataclass + from sqlalchemy import ForeignKey from sqlalchemy.orm import relationship @@ -18,6 +20,7 @@ class BpmnProcessNotFoundError(Exception): # "success", # boolean # "bpmn_messages", # if top-level process # "correlations", # if top-level process +@dataclass class BpmnProcessModel(SpiffworkflowBaseDBModel): __tablename__ = "bpmn_process" id: int = db.Column(db.Integer, primary_key=True) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process_definition.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process_definition.py index 7f60d751..90206235 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process_definition.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process_definition.py @@ -1,5 +1,7 @@ from __future__ import annotations +from dataclasses import dataclass + from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel @@ -10,6 +12,7 @@ from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel # # each subprocess will have its own row in this table. 
# there is a join table to link them together: bpmn_process_definition_relationship +@dataclass class BpmnProcessDefinitionModel(SpiffworkflowBaseDBModel): __tablename__ = "bpmn_process_definition" id: int = db.Column(db.Integer, primary_key=True) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process_definition_relationship.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process_definition_relationship.py index 096570d8..51126503 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process_definition_relationship.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process_definition_relationship.py @@ -1,5 +1,7 @@ from __future__ import annotations +from dataclasses import dataclass + from sqlalchemy import ForeignKey from sqlalchemy import UniqueConstraint @@ -10,6 +12,7 @@ from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel +@dataclass class BpmnProcessDefinitionRelationshipModel(SpiffworkflowBaseDBModel): __tablename__ = "bpmn_process_definition_relationship" __table_args__ = ( diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/task_definition.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/task_definition.py index 791e1dea..ec243649 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/task_definition.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/task_definition.py @@ -1,5 +1,7 @@ from __future__ import annotations +from dataclasses import dataclass + from sqlalchemy import ForeignKey from sqlalchemy import UniqueConstraint from sqlalchemy.orm import relationship @@ -11,6 +13,7 @@ from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel +@dataclass class TaskDefinitionModel(SpiffworkflowBaseDBModel): __tablename__ = "task_definition" __table_args__ = ( diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index b0cde36f..75e2a23c 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -606,6 +606,8 @@ def process_instance_task_list( TaskModel.process_instance_id == process_instance.id, ) + to_task_model: Optional[TaskModel] = None + task_models_of_parent_bpmn_processes_guids: list[str] = [] if to_task_guid is not None: to_task_model = TaskModel.query.filter_by(guid=to_task_guid, process_instance_id=process_instance.id).first() if to_task_model is None: @@ -614,7 +616,28 @@ def process_instance_task_list( message=f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'", status_code=400, ) - task_model_query = task_model_query.filter(TaskModel.end_in_seconds <= to_task_model.end_in_seconds) + + if to_task_model.state != "COMPLETED": + # TODO: find a better term for viewing at task state + raise ApiError( + error_code="task_cannot_be_viewed_at", + message=( + f"Desired task with guid '{to_task_guid}' for process instance '{process_instance.id}' was never" + " completed and therefore cannot be viewed at." 
+                ),
+                status_code=400,
+            )
+
+        _parent_bpmn_processes, task_models_of_parent_bpmn_processes = (
+            TaskService.task_models_of_parent_bpmn_processes(to_task_model)
+        )
+        task_models_of_parent_bpmn_processes_guids = [p.guid for p in task_models_of_parent_bpmn_processes if p.guid]
+        task_model_query = task_model_query.filter(
+            or_(
+                TaskModel.end_in_seconds <= to_task_model.end_in_seconds,  # type: ignore
+                TaskModel.guid.in_(task_models_of_parent_bpmn_processes_guids),  # type: ignore
+            )
+        )
 
     bpmn_process_alias = aliased(BpmnProcessModel)
     direct_parent_bpmn_process_alias = aliased(BpmnProcessModel)
@@ -649,6 +672,9 @@ def process_instance_task_list(
             TaskDefinitionModel.properties_json.label("task_definition_properties_json"),  # type: ignore
             TaskModel.guid,
             TaskModel.state,
+            TaskModel.properties_json,
+            TaskModel.end_in_seconds,
+            TaskModel.start_in_seconds,
         )
     )
 
@@ -656,11 +682,18 @@ def process_instance_task_list(
         task_model_query = task_model_query.filter(bpmn_process_alias.id.in_(bpmn_process_ids))
 
     task_models = task_model_query.all()
-    if to_task_guid is not None:
+    if to_task_model is not None:
         task_models_dict = json.loads(current_app.json.dumps(task_models))
         for task_model in task_models_dict:
-            if task_model["guid"] == to_task_guid and task_model["state"] == "COMPLETED":
-                task_model["state"] = "READY"
+            end_in_seconds = float(task_model["end_in_seconds"])
+            if to_task_model.guid == task_model["guid"] and task_model["state"] == "COMPLETED":
+                TaskService.reset_task_model_dict(task_model, state="READY")
+            elif (
+                end_in_seconds is None
+                or to_task_model.end_in_seconds is None
+                or to_task_model.end_in_seconds < end_in_seconds
+            ) and task_model["guid"] in task_models_of_parent_bpmn_processes_guids:
+                TaskService.reset_task_model_dict(task_model, state="WAITING")
         return make_response(jsonify(task_models_dict), 200)
 
     return make_response(jsonify(task_models), 200)
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
index 849a0ee5..535a2be4 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
@@ -52,8 +52,6 @@ from SpiffWorkflow.spiff.serializer.config import SPIFF_SPEC_CONFIG  # type: ign
 from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
 from SpiffWorkflow.task import TaskState
 from SpiffWorkflow.util.deep_merge import DeepMerge  # type: ignore
-from sqlalchemy import and_
-from sqlalchemy import or_
 
 from spiffworkflow_backend.exceptions.api_error import ApiError
 from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel
@@ -1311,114 +1309,118 @@ class ProcessInstanceProcessor:
         # Saving the workflow seems to reset the status
         self.suspend()
 
+    # FIXME: this currently cannot work for multi-instance tasks and loopback. It can somewhat work for tasks
+    # other than those if we can properly handle resetting children tasks. Right now if we set them all to FUTURE then
+    # they never get picked up by spiff and processed. The process instance just stops after the to_task_guid
+    # and marks itself complete without processing any of the children.
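+    # A rough sketch of one possible direction (an assumption, not working code): after moving
+    # later tasks to FUTURE, spiff would likely need to be told to re-predict and re-queue them
+    # instead of leaving them inert, e.g. something along these lines:
+    #
+    #     spiff_task = processor.bpmn_process_instance.get_task(UUID(to_task_guid))
+    #     spiff_task.reset_token(reset_data=False)  # hypothetical SpiffWorkflow call
+    #     processor.bpmn_process_instance.refresh_waiting_tasks()
+    #
+    # refresh_waiting_tasks() does exist on the workflow object; reset_token() here is illustrative only.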
@classmethod def reset_process( cls, process_instance: ProcessInstanceModel, to_task_guid: str, commit: Optional[bool] = False ) -> None: """Reset a process to an earlier state.""" - cls.add_event_to_process_instance( - process_instance, ProcessInstanceEventType.process_instance_rewound_to_task.value, task_guid=to_task_guid - ) - - to_task_model = TaskModel.query.filter_by(guid=to_task_guid, process_instance_id=process_instance.id).first() - if to_task_model is None: - raise TaskNotFoundError( - f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'" - ) - - parent_bpmn_processes, task_models_of_parent_bpmn_processes = TaskService.task_models_of_parent_bpmn_processes( - to_task_model - ) - task_models_of_parent_bpmn_processes_guids = [p.guid for p in task_models_of_parent_bpmn_processes if p.guid] - parent_bpmn_processes_ids = [p.id for p in parent_bpmn_processes] - tasks_to_update_query = db.session.query(TaskModel).filter( - and_( - or_( - TaskModel.end_in_seconds > to_task_model.end_in_seconds, - TaskModel.end_in_seconds.is_(None), # type: ignore - ), - TaskModel.process_instance_id == process_instance.id, - # TaskModel.bpmn_process_id.in_(parent_bpmn_processes_ids), # type: ignore - ) - ) - tasks_to_update = tasks_to_update_query.all() - - # run all queries before making changes to task_model - if commit: - # tasks_to_delete_query = db.session.query(TaskModel).filter( - # and_( - # or_( - # TaskModel.end_in_seconds > to_task_model.end_in_seconds, - # TaskModel.end_in_seconds.is_not(None), # type: ignore - # ), - # TaskModel.process_instance_id == process_instance.id, - # TaskModel.guid.not_in(task_models_of_parent_bpmn_processes_guids), # type: ignore - # TaskModel.bpmn_process_id.not_in(parent_bpmn_processes_ids), # type: ignore - # ) - # ) - # - # tasks_to_delete = tasks_to_delete_query.all() - # - # # delete any later tasks from to_task_model and delete bpmn processes that may be - # # link directly to one of those tasks. 
- # tasks_to_delete_guids = [t.guid for t in tasks_to_delete] - # tasks_to_delete_ids = [t.id for t in tasks_to_delete] - # bpmn_processes_to_delete = BpmnProcessModel.query.filter( - # BpmnProcessModel.guid.in_(tasks_to_delete_guids) # type: ignore - # ).order_by(BpmnProcessModel.id.desc()).all() - # human_tasks_to_delete = HumanTaskModel.query.filter( - # HumanTaskModel.task_model_id.in_(tasks_to_delete_ids) # type: ignore - # ).all() - # - # - # import pdb; pdb.set_trace() - # # ensure the correct order for foreign keys - # for human_task_to_delete in human_tasks_to_delete: - # db.session.delete(human_task_to_delete) - # db.session.commit() - # for task_to_delete in tasks_to_delete: - # db.session.delete(task_to_delete) - # db.session.commit() - # for bpmn_process_to_delete in bpmn_processes_to_delete: - # db.session.delete(bpmn_process_to_delete) - # db.session.commit() - - - related_human_task = HumanTaskModel.query.filter_by(task_model_id=to_task_model.id).first() - if related_human_task is not None: - db.session.delete(related_human_task) - - tasks_to_update_ids = [t.id for t in tasks_to_update] - human_tasks_to_delete = HumanTaskModel.query.filter( - HumanTaskModel.task_model_id.in_(tasks_to_update_ids) # type: ignore - ).all() - for human_task_to_delete in human_tasks_to_delete: - db.session.delete(human_task_to_delete) - db.session.commit() - - for task_to_update in tasks_to_update: - TaskService.reset_task_model(task_to_update, state="FUTURE", commit=commit) - - parent_task_model = TaskModel.query.filter_by(guid=to_task_model.properties_json["parent"]).first() - if parent_task_model is None: - raise TaskNotFoundError( - f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'" - ) - - TaskService.reset_task_model( - to_task_model, - state="READY", - json_data_hash=parent_task_model.json_data_hash, - python_env_data_hash=parent_task_model.python_env_data_hash, - commit=commit, - ) - for task_model in task_models_of_parent_bpmn_processes: - TaskService.reset_task_model(task_model, state="WAITING", commit=commit) - - if commit: - processor = ProcessInstanceProcessor(process_instance) - processor.save() - processor.suspend() + raise Exception("This feature to reset a process instance to a given task is currently unavaiable") + # cls.add_event_to_process_instance( + # process_instance, ProcessInstanceEventType.process_instance_rewound_to_task.value, task_guid=to_task_guid + # ) + # + # to_task_model = TaskModel.query.filter_by(guid=to_task_guid, process_instance_id=process_instance.id).first() + # if to_task_model is None: + # raise TaskNotFoundError( + # f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'" + # ) + # + # parent_bpmn_processes, task_models_of_parent_bpmn_processes = TaskService.task_models_of_parent_bpmn_processes( + # to_task_model + # ) + # [p.guid for p in task_models_of_parent_bpmn_processes if p.guid] + # [p.id for p in parent_bpmn_processes] + # tasks_to_update_query = db.session.query(TaskModel).filter( + # and_( + # or_( + # TaskModel.end_in_seconds > to_task_model.end_in_seconds, + # TaskModel.end_in_seconds.is_(None), # type: ignore + # ), + # TaskModel.process_instance_id == process_instance.id, + # # TaskModel.bpmn_process_id.in_(parent_bpmn_processes_ids), # type: ignore + # ) + # ) + # tasks_to_update = tasks_to_update_query.all() + # + # # run all queries before making changes to task_model + # if commit: + # # tasks_to_delete_query = db.session.query(TaskModel).filter( + # # 
and_( + # # or_( + # # TaskModel.end_in_seconds > to_task_model.end_in_seconds, + # # TaskModel.end_in_seconds.is_not(None), # type: ignore + # # ), + # # TaskModel.process_instance_id == process_instance.id, + # # TaskModel.guid.not_in(task_models_of_parent_bpmn_processes_guids), # type: ignore + # # TaskModel.bpmn_process_id.not_in(parent_bpmn_processes_ids), # type: ignore + # # ) + # # ) + # # + # # tasks_to_delete = tasks_to_delete_query.all() + # # + # # # delete any later tasks from to_task_model and delete bpmn processes that may be + # # # link directly to one of those tasks. + # # tasks_to_delete_guids = [t.guid for t in tasks_to_delete] + # # tasks_to_delete_ids = [t.id for t in tasks_to_delete] + # # bpmn_processes_to_delete = BpmnProcessModel.query.filter( + # # BpmnProcessModel.guid.in_(tasks_to_delete_guids) # type: ignore + # # ).order_by(BpmnProcessModel.id.desc()).all() + # # human_tasks_to_delete = HumanTaskModel.query.filter( + # # HumanTaskModel.task_model_id.in_(tasks_to_delete_ids) # type: ignore + # # ).all() + # # + # # + # # import pdb; pdb.set_trace() + # # # ensure the correct order for foreign keys + # # for human_task_to_delete in human_tasks_to_delete: + # # db.session.delete(human_task_to_delete) + # # db.session.commit() + # # for task_to_delete in tasks_to_delete: + # # db.session.delete(task_to_delete) + # # db.session.commit() + # # for bpmn_process_to_delete in bpmn_processes_to_delete: + # # db.session.delete(bpmn_process_to_delete) + # # db.session.commit() + # + # related_human_task = HumanTaskModel.query.filter_by(task_model_id=to_task_model.id).first() + # if related_human_task is not None: + # db.session.delete(related_human_task) + # + # tasks_to_update_ids = [t.id for t in tasks_to_update] + # human_tasks_to_delete = HumanTaskModel.query.filter( + # HumanTaskModel.task_model_id.in_(tasks_to_update_ids) # type: ignore + # ).all() + # for human_task_to_delete in human_tasks_to_delete: + # db.session.delete(human_task_to_delete) + # db.session.commit() + # + # for task_to_update in tasks_to_update: + # TaskService.reset_task_model(task_to_update, state="FUTURE", commit=commit) + # + # parent_task_model = TaskModel.query.filter_by(guid=to_task_model.properties_json["parent"]).first() + # if parent_task_model is None: + # raise TaskNotFoundError( + # f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'" + # ) + # + # TaskService.reset_task_model( + # to_task_model, + # state="READY", + # json_data_hash=parent_task_model.json_data_hash, + # python_env_data_hash=parent_task_model.python_env_data_hash, + # commit=commit, + # ) + # for task_model in task_models_of_parent_bpmn_processes: + # TaskService.reset_task_model(task_model, state="WAITING", commit=commit) + # + # if commit: + # processor = ProcessInstanceProcessor(process_instance) + # processor.save() + # processor.suspend() @staticmethod def get_parser() -> MyCustomParser: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index f75d955c..29a45677 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -332,6 +332,17 @@ class TaskService: return (bpmn_processes + b, [parent_task_model] + t) return (bpmn_processes, task_models) + @classmethod + def reset_task_model_dict( + cls, + task_model: dict, + state: str, + ) -> None: + 
task_model["state"] = state + task_model["start_in_seconds"] = None + task_model["end_in_seconds"] = None + task_model["properties_json"]["state"] = getattr(TaskState, state) + @classmethod def reset_task_model( cls, diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py index 4d44308b..b8983f1d 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py @@ -107,6 +107,8 @@ class TaskModelSavingDelegate(EngineStepDelegate): def after_engine_steps(self, bpmn_process_instance: BpmnWorkflow) -> None: if self._should_update_task_model(): + # TODO: also include children of the last task processed. This may help with task resets + # if we have to set their states to FUTURE. # excludes FUTURE and COMPLETED. the others were required to get PP1 to go to completion. for waiting_spiff_task in bpmn_process_instance.get_tasks( TaskState.WAITING | TaskState.CANCELLED | TaskState.READY | TaskState.MAYBE | TaskState.LIKELY diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index 9b447f34..9ccda1cb 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -256,63 +256,60 @@ class TestProcessInstanceProcessor(BaseTest): assert spiff_task is not None assert spiff_task.state == TaskState.COMPLETED - def test_properly_resets_process_to_given_task( - self, - app: Flask, - client: FlaskClient, - with_db_and_bpmn_file_cleanup: None, - with_super_admin_user: UserModel, - ) -> None: - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") - initiator_user = self.find_or_create_user("initiator_user") - finance_user_three = self.find_or_create_user("testuser3") - assert initiator_user.principal is not None - assert finance_user_three.principal is not None - AuthorizationService.import_permissions_from_yaml_file() - - finance_group = GroupModel.query.filter_by(identifier="Finance Team").first() - assert finance_group is not None - - process_model = load_test_spec( - process_model_id="test_group/manual_task_with_subprocesses", - process_model_source_directory="manual_task_with_subprocesses", - ) - process_instance = self.create_process_instance_from_process_model( - process_model=process_model, user=initiator_user - ) - processor = ProcessInstanceProcessor(process_instance) - processor.do_engine_steps(save=True) - assert len(process_instance.active_human_tasks) == 1 - initial_human_task_id = process_instance.active_human_tasks[0].id - - # save again to ensure we go attempt to process the human tasks again - processor.save() - - assert len(process_instance.active_human_tasks) == 1 - assert initial_human_task_id == process_instance.active_human_tasks[0].id - - processor = ProcessInstanceProcessor(process_instance) - human_task_one = process_instance.active_human_tasks[0] - spiff_manual_task = processor.__class__.get_task_by_bpmn_identifier( - human_task_one.task_name, processor.bpmn_process_instance - ) - ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) - - processor.suspend() - 
ProcessInstanceProcessor.reset_process(process_instance, str(spiff_manual_task.id), commit=True) - - process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() - processor = ProcessInstanceProcessor(process_instance) - human_task_one = process_instance.active_human_tasks[0] - spiff_manual_task = processor.bpmn_process_instance.get_task(UUID(human_task_one.task_id)) - import pdb; pdb.set_trace() - ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) - import pdb; pdb.set_trace() - human_task_one = process_instance.active_human_tasks[0] - spiff_manual_task = processor.bpmn_process_instance.get_task(UUID(human_task_one.task_id)) - ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) - - import pdb; pdb.set_trace() + # TODO: FIX resetting a process instance to a task + # def test_properly_resets_process_to_given_task( + # self, + # app: Flask, + # client: FlaskClient, + # with_db_and_bpmn_file_cleanup: None, + # with_super_admin_user: UserModel, + # ) -> None: + # self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + # initiator_user = self.find_or_create_user("initiator_user") + # finance_user_three = self.find_or_create_user("testuser3") + # assert initiator_user.principal is not None + # assert finance_user_three.principal is not None + # AuthorizationService.import_permissions_from_yaml_file() + # + # finance_group = GroupModel.query.filter_by(identifier="Finance Team").first() + # assert finance_group is not None + # + # process_model = load_test_spec( + # process_model_id="test_group/manual_task_with_subprocesses", + # process_model_source_directory="manual_task_with_subprocesses", + # ) + # process_instance = self.create_process_instance_from_process_model( + # process_model=process_model, user=initiator_user + # ) + # processor = ProcessInstanceProcessor(process_instance) + # processor.do_engine_steps(save=True) + # assert len(process_instance.active_human_tasks) == 1 + # initial_human_task_id = process_instance.active_human_tasks[0].id + # + # # save again to ensure we go attempt to process the human tasks again + # processor.save() + # + # assert len(process_instance.active_human_tasks) == 1 + # assert initial_human_task_id == process_instance.active_human_tasks[0].id + # + # processor = ProcessInstanceProcessor(process_instance) + # human_task_one = process_instance.active_human_tasks[0] + # spiff_manual_task = processor.__class__.get_task_by_bpmn_identifier( + # human_task_one.task_name, processor.bpmn_process_instance + # ) + # ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) + # + # processor.suspend() + # ProcessInstanceProcessor.reset_process(process_instance, str(spiff_manual_task.id), commit=True) + # + # process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() + # processor = ProcessInstanceProcessor(process_instance) + # human_task_one = process_instance.active_human_tasks[0] + # spiff_manual_task = processor.bpmn_process_instance.get_task(UUID(human_task_one.task_id)) + # ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) + # human_task_one = process_instance.active_human_tasks[0] + # spiff_manual_task = processor.bpmn_process_instance.get_task(UUID(human_task_one.task_id)) + # ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, 
initiator_user, human_task_one) def test_properly_saves_tasks_when_running( self, @@ -374,7 +371,11 @@ class TestProcessInstanceProcessor(BaseTest): } third_data_set = { **second_data_set, - **{"set_in_test_process_to_call_script": 1, "set_in_test_process_to_call_subprocess_subprocess_script": 1, "set_in_test_process_to_call_subprocess_script": 1}, + **{ + "set_in_test_process_to_call_script": 1, + "set_in_test_process_to_call_subprocess_subprocess_script": 1, + "set_in_test_process_to_call_subprocess_script": 1, + }, } fourth_data_set = {**third_data_set, **{"a": 1, "we_move_on": True}} fifth_data_set = {**fourth_data_set, **{"validate_only": False, "set_top_level_process_script_after_gate": 1}} @@ -401,7 +402,10 @@ class TestProcessInstanceProcessor(BaseTest): expected_python_env_data = expected_task_data[expected_task_data_key] - base_failure_message = f"Failed on {bpmn_process_identifier} - {spiff_task_identifier} - task data key {expected_task_data_key}." + base_failure_message = ( + f"Failed on {bpmn_process_identifier} - {spiff_task_identifier} - task data key" + f" {expected_task_data_key}." + ) task_model = TaskModel.query.filter_by(guid=str(spiff_task.id)).first() assert task_model.start_in_seconds is not None @@ -414,7 +418,8 @@ class TestProcessInstanceProcessor(BaseTest): assert task_definition.bpmn_process_definition.bpmn_identifier == bpmn_process_identifier message = ( - f"{base_failure_message} Expected: {sorted(expected_python_env_data)}. Received: {sorted(task_model.json_data())}" + f"{base_failure_message} Expected: {sorted(expected_python_env_data)}. Received:" + f" {sorted(task_model.json_data())}" ) # TODO: if we split out env data again we will need to use it here instead of json_data # assert task_model.python_env_data() == expected_python_env_data, message From b61b6d7921858d6c30fb04bad8c7c7e7efd6bd5b Mon Sep 17 00:00:00 2001 From: jasquat Date: Thu, 23 Mar 2023 11:08:23 -0400 Subject: [PATCH 084/162] some updates to add the log link back into the log list w/ burnettk --- spiffworkflow-frontend/src/interfaces.ts | 17 +++++++ .../src/routes/ProcessInstanceLogList.tsx | 47 ++++++++++++------- .../src/routes/ProcessInstanceShow.tsx | 2 +- 3 files changed, 49 insertions(+), 17 deletions(-) diff --git a/spiffworkflow-frontend/src/interfaces.ts b/spiffworkflow-frontend/src/interfaces.ts index 4e65bd02..2b1a457d 100644 --- a/spiffworkflow-frontend/src/interfaces.ts +++ b/spiffworkflow-frontend/src/interfaces.ts @@ -300,3 +300,20 @@ export interface JsonSchemaForm { process_model_id: string; required: string[]; } + +export interface ProcessInstanceLogEntry { + bpmn_process_definition_identifier: string; + bpmn_process_definition_name: string; + bpmn_task_type: string; + event_type: string; + spiff_task_guid: string; + task_definition_identifier: string; + task_guid: string; + timestamp: number; + id: number; + process_instance_id: number; + + task_definition_name?: string; + user_id?: number; + username?: string; +} diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx index 4f1d39be..797ba254 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx @@ -1,7 +1,7 @@ import { useEffect, useState } from 'react'; // @ts-ignore import { Table, Tabs, TabList, Tab } from '@carbon/react'; -import { useParams, useSearchParams } from 'react-router-dom'; +import { Link, useParams, useSearchParams } from 
'react-router-dom'; import PaginationForTable from '../components/PaginationForTable'; import ProcessBreadcrumb from '../components/ProcessBreadcrumb'; import { @@ -10,6 +10,7 @@ import { } from '../helpers'; import HttpService from '../services/HttpService'; import { useUriListForPermissions } from '../hooks/UriListForPermissions'; +import { ProcessInstanceLogEntry } from '../interfaces'; type OwnProps = { variant: string; @@ -50,25 +51,26 @@ export default function ProcessInstanceLogList({ variant }: OwnProps) { isDetailedView, ]); - const getTableRow = (row: any) => { + const getTableRow = (logEntry: ProcessInstanceLogEntry) => { const tableRow = []; const taskNameCell = ( - {row.task_definition_name || - (row.bpmn_task_type === 'StartEvent' ? 'Process Started' : '') || - (row.bpmn_task_type === 'EndEvent' ? 'Process Ended' : '')} + {logEntry.spiff_task_guid || + logEntry.task_definition_name || + (logEntry.bpmn_task_type === 'StartEvent' ? 'Process Started' : '') || + (logEntry.bpmn_task_type === 'EndEvent' ? 'Process Ended' : '')} ); const bpmnProcessCell = ( - {row.bpmn_process_definition_name || - row.bpmn_process_definition_identifier} + {logEntry.bpmn_process_definition_name || + logEntry.bpmn_process_definition_identifier} ); if (isDetailedView) { tableRow.push( <> - {row.id} + {logEntry.id} {bpmnProcessCell} {taskNameCell} @@ -84,24 +86,37 @@ export default function ProcessInstanceLogList({ variant }: OwnProps) { if (isDetailedView) { tableRow.push( <> - {row.bpmn_task_type} - {row.event_type} + {logEntry.bpmn_task_type} + {logEntry.event_type} - {row.username || ( + {logEntry.username || ( system )} ); } - tableRow.push({convertSecondsToFormattedDateTime(row.timestamp)}); - return {tableRow}; + // tableRow.push({convertSecondsToFormattedDateTime(logEntry.timestamp)}); + tableRow.push( + + + {convertSecondsToFormattedDateTime(logEntry.timestamp)} + + + ); + + return {tableRow}; }; const buildTable = () => { - const rows = processInstanceLogs.map((row) => { - return getTableRow(row); - }); + const rows = processInstanceLogs.map( + (logEntry: ProcessInstanceLogEntry) => { + return getTableRow(logEntry); + } + ); const tableHeaders = []; if (isDetailedView) { diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx index 74e6e1a8..1cfc36c2 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx @@ -608,7 +608,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { const isCurrentTask = (task: Task) => { const subprocessTypes = [ 'Subprocess', - 'Call Activity', + 'CallActivity', 'Transactional Subprocess', ]; return ( From cb6ba526779a3797a3baa31423da3b26f03bd229 Mon Sep 17 00:00:00 2001 From: jasquat Date: Thu, 23 Mar 2023 13:55:16 -0400 Subject: [PATCH 085/162] added link to go back to most recent --- .../routes/process_instances_controller.py | 2 +- .../src/routes/ProcessInstanceLogList.tsx | 30 ++++++---- .../src/routes/ProcessInstanceShow.tsx | 59 +++++++++++++++---- 3 files changed, 65 insertions(+), 26 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index 75e2a23c..bb5cef6c 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ 
b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -685,7 +685,7 @@ def process_instance_task_list( if to_task_model is not None: task_models_dict = json.loads(current_app.json.dumps(task_models)) for task_model in task_models_dict: - end_in_seconds = float(task_model["end_in_seconds"]) + end_in_seconds = float(task_model["end_in_seconds"]) if task_model["end_in_seconds"] is not None else None if to_task_model.guid == task_model["guid"] and task_model["state"] == "COMPLETED": TaskService.reset_task_model_dict(task_model, state="READY") elif ( diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx index 797ba254..a59b2fab 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx @@ -55,8 +55,7 @@ export default function ProcessInstanceLogList({ variant }: OwnProps) { const tableRow = []; const taskNameCell = ( - {logEntry.spiff_task_guid || - logEntry.task_definition_name || + {logEntry.task_definition_name || (logEntry.bpmn_task_type === 'StartEvent' ? 'Process Started' : '') || (logEntry.bpmn_task_type === 'EndEvent' ? 'Process Ended' : '')} @@ -96,17 +95,24 @@ export default function ProcessInstanceLogList({ variant }: OwnProps) { ); } - // tableRow.push({convertSecondsToFormattedDateTime(logEntry.timestamp)}); - tableRow.push( - - - {convertSecondsToFormattedDateTime(logEntry.timestamp)} - - + + let timestampComponent = ( + {convertSecondsToFormattedDateTime(logEntry.timestamp)} ); + if (logEntry.spiff_task_guid) { + timestampComponent = ( + + + {convertSecondsToFormattedDateTime(logEntry.timestamp)} + + + ); + } + tableRow.push(timestampComponent); return {tableRow}; }; diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx index 1cfc36c2..07cb45fd 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx @@ -605,7 +605,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { } }; - const isCurrentTask = (task: Task) => { + const isActiveTask = (task: Task) => { const subprocessTypes = [ 'Subprocess', 'CallActivity', @@ -622,7 +622,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { return ( processInstance && ability.can('PUT', targetUris.processInstanceTaskDataPath) && - isCurrentTask(task) && + isActiveTask(task) && processInstance.status === 'suspended' && showingLastSpiffStep() ); @@ -646,7 +646,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { processInstance && processInstance.status === 'suspended' && ability.can('POST', targetUris.processInstanceCompleteTaskPath) && - isCurrentTask(task) && + isActiveTask(task) && showingLastSpiffStep() ); }; @@ -976,16 +976,18 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { ): {taskToUse.state} {taskDisplayButtons(taskToUse)} -
- - {completionViewLink( - 'View state at task completion', - taskToUse.guid - )} - -
-
-
+ {taskToUse.state == 'COMPLETED' ? ( +
+ + {completionViewLink( + 'View state at task completion', + taskToUse.guid + )} + +
+
+
+ ) : null} {selectingEvent ? eventSelector(candidateEvents) : taskDataContainer()} @@ -1029,6 +1031,36 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { return elements; }; + // right now this just assume if to_task_guid was passed in then + // this cannot be the active task. + // we may need a better way to figure this out. + const showingActiveTask = () => { + return !!params.to_task_guid; + }; + + const viewMostRecentStateComponent = () => { + if (!showingActiveTask()) { + return null; + } + + return ( + <> + + + + View at most recent state + + + +
+ + ); + }; + if (processInstance && (tasks || tasksCallHadError)) { const taskIds = getTaskIds(); const processModelId = unModifyProcessIdentifierForPathParam( @@ -1083,6 +1115,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { {processDataDisplayArea()} {processInstanceMetadataArea()}
+ {viewMostRecentStateComponent()} Date: Thu, 23 Mar 2023 14:24:41 -0400 Subject: [PATCH 086/162] add test user to keycloak --- .../realm_exports/spiffworkflow-realm.json | 73 ++++++++++++------- .../keycloak/test_user_lists/status | 1 + 2 files changed, 49 insertions(+), 25 deletions(-) diff --git a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json index 99e651b9..87d72394 100644 --- a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json +++ b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json @@ -1884,6 +1884,29 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] + }, { + "id" : "992c7cfb-377f-4d80-b399-edf218ad640e", + "createdTimestamp" : 1679595782179, + "username" : "jamescheung", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "jamescheung@status.im", + "attributes" : { + "spiffworkflow-employeeid" : [ "234" ] + }, + "credentials" : [ { + "id" : "3e62811d-d294-4c2b-a681-3a93ea0f8bc2", + "type" : "password", + "createdDate" : 1679595782238, + "secretData" : "{\"value\":\"oFDel18kGBSpCvfrni1SSY2Ti3eJmYxCuwcar5PoBHECXISIbuz0t5i97COiXCI52vxSkorwl3c8r2j+77B2kw==\",\"salt\":\"tVvRYyNH4ktBXNjmfP6JtQ==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] }, { "id" : "2df44301-506a-4053-9ece-830d2b3c295b", "createdTimestamp" : 1676302142640, @@ -4624,7 +4647,7 @@ "subType" : "authenticated", "subComponents" : { }, "config" : { - "allowed-protocol-mapper-types" : [ "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-attribute-mapper", "oidc-address-mapper", "saml-user-attribute-mapper", "oidc-full-name-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper", "saml-user-property-mapper" ] + "allowed-protocol-mapper-types" : [ "oidc-usermodel-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-role-list-mapper", "oidc-address-mapper", "saml-user-attribute-mapper", "oidc-usermodel-property-mapper", "oidc-full-name-mapper", "saml-user-property-mapper" ] } }, { "id" : "d68e938d-dde6-47d9-bdc8-8e8523eb08cd", @@ -4642,7 +4665,7 @@ "subType" : "anonymous", "subComponents" : { }, "config" : { - "allowed-protocol-mapper-types" : [ "saml-user-property-mapper", "saml-user-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-role-list-mapper", "oidc-full-name-mapper", "oidc-usermodel-attribute-mapper", "oidc-address-mapper", "oidc-usermodel-property-mapper" ] + "allowed-protocol-mapper-types" : [ "saml-role-list-mapper", "oidc-usermodel-property-mapper", "oidc-usermodel-attribute-mapper", "oidc-full-name-mapper", "saml-user-property-mapper", "saml-user-attribute-mapper", "oidc-address-mapper", "oidc-sha256-pairwise-sub-mapper" ] } }, { "id" : "3854361d-3fe5-47fb-9417-a99592e3dc5c", @@ -4732,7 +4755,7 @@ "internationalizationEnabled" : false, "supportedLocales" : [ ], "authenticationFlows" : [ { - "id" : "38a6b336-b026-46be-a8be-e8ff7b9da407", + "id" : "2b106fbb-fa1a-4acd-b95a-08e3ace9a0fc", "alias" : "Account verification options", "description" : "Method with which to verity the existing account", "providerId" : "basic-flow", @@ -4754,7 +4777,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "eb9fe753-cd35-4e65-bb34-e83ba7059566", + "id" : 
"e3c77b34-6f89-4ddf-90da-486ad2cf620d", "alias" : "Authentication Options", "description" : "Authentication options.", "providerId" : "basic-flow", @@ -4783,7 +4806,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "aa9c74f7-0426-4440-907f-4aa0f999eb1e", + "id" : "4df60d27-2ad2-4819-a7a2-45b5e8cc054b", "alias" : "Browser - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -4805,7 +4828,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "eb2a0849-c316-46bc-8b06-fd0cc50e3f32", + "id" : "c6c0ab1c-e8cc-47f6-8b19-c89c9ad431aa", "alias" : "Direct Grant - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -4827,7 +4850,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "8f064003-823b-4be1-aa66-7324bf38c741", + "id" : "d331b984-7398-4e87-9357-4f16b4389a6e", "alias" : "First broker login - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -4849,7 +4872,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "eef22678-b09c-4ca8-bdcf-90ea44ff0120", + "id" : "87cfccbe-25bc-41d8-b009-9b8e65ea244a", "alias" : "Handle Existing Account", "description" : "Handle what to do if there is existing account with same email/username like authenticated identity provider", "providerId" : "basic-flow", @@ -4871,7 +4894,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "4367f263-ef2c-426e-b5cd-49fff868ea1a", + "id" : "893e65ce-b2f2-4323-9c5a-bedfaef72ded", "alias" : "Reset - Conditional OTP", "description" : "Flow to determine if the OTP should be reset or not. Set to REQUIRED to force.", "providerId" : "basic-flow", @@ -4893,7 +4916,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "b2e9c608-1779-4c03-b32a-03c77450abae", + "id" : "815ce99a-21fe-43fb-8d73-4ff433d2c231", "alias" : "User creation or linking", "description" : "Flow for the existing/non-existing user alternatives", "providerId" : "basic-flow", @@ -4916,7 +4939,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "a8c79324-1881-4bb0-a8a2-83dfd54cacd1", + "id" : "c05fc254-0382-49c3-a666-00623d5ee1fe", "alias" : "Verify Existing Account by Re-authentication", "description" : "Reauthentication of existing account", "providerId" : "basic-flow", @@ -4938,7 +4961,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "d1aa83c6-da36-4cb6-b6ed-f6ec556df614", + "id" : "df015ab7-fa9d-416f-bcf0-a2ec26c13ede", "alias" : "browser", "description" : "browser based authentication", "providerId" : "basic-flow", @@ -4974,7 +4997,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "2afecfef-4bfb-4842-b338-7ed032a618d2", + "id" : "95a3d414-80a3-42de-abdb-40512b13229e", "alias" : "clients", "description" : "Base authentication for clients", "providerId" : "client-flow", @@ -5010,7 +5033,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "34dc1854-4969-4065-90e6-fef38b0dea98", + "id" : "923d6322-6d29-40bc-87e1-bcf13c6158fb", "alias" : "direct grant", "description" : "OpenID Connect Resource Owner Grant", "providerId" : "basic-flow", @@ -5039,7 +5062,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "40557323-dbbc-48ee-9ed1-748b11c9628d", + "id" : "2f07fd17-a290-4d48-af3e-3cfd527fa5a1", "alias" : "docker auth", "description" : "Used by Docker clients to authenticate against the IDP", "providerId" : "basic-flow", @@ -5054,7 +5077,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "d18b5c50-39fa-4b11-a7d2-0e6768e275c1", + "id" : 
"54100d19-bc91-4cba-af55-297a543eaa9a", "alias" : "first broker login", "description" : "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account", "providerId" : "basic-flow", @@ -5077,7 +5100,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "976be80d-a88b-412c-8ad2-9ebe427793d4", + "id" : "05921eb5-d82c-4563-99e0-55e7911bf550", "alias" : "forms", "description" : "Username, password, otp and other auth forms.", "providerId" : "basic-flow", @@ -5099,7 +5122,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "83b3a411-ff7c-4cba-845a-9554c536d6b1", + "id" : "222cb198-cac2-4d61-826c-47aa77d73d3a", "alias" : "http challenge", "description" : "An authentication flow based on challenge-response HTTP Authentication Schemes", "providerId" : "basic-flow", @@ -5121,7 +5144,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "1cb835a6-b38c-4f29-a6d8-d04d0a84d05e", + "id" : "5941ffb8-9d61-4b7e-b46e-b9160b92d9bc", "alias" : "registration", "description" : "registration flow", "providerId" : "basic-flow", @@ -5137,7 +5160,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "7ec06c82-6802-4ff4-a3ab-9b6a0b8dbc4b", + "id" : "d63ba5c0-e9ed-4f92-a6b8-c4f69b6258a8", "alias" : "registration form", "description" : "registration form", "providerId" : "form-flow", @@ -5173,7 +5196,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "f3bc2f7b-2074-4d93-9578-3abf648a6681", + "id" : "82d2eb72-4cfa-41be-b800-96633b6bbf60", "alias" : "reset credentials", "description" : "Reset credentials for a user if they forgot their password or something", "providerId" : "basic-flow", @@ -5209,7 +5232,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "e62e031b-9922-4682-b867-bc5c3a4a7e99", + "id" : "aeacc85c-e8da-41c8-84bb-4740214c3d1f", "alias" : "saml ecp", "description" : "SAML ECP Profile Authentication Flow", "providerId" : "basic-flow", @@ -5225,13 +5248,13 @@ } ] } ], "authenticatorConfig" : [ { - "id" : "c449f0aa-5f3c-4107-9f04-3222fa93a486", + "id" : "1f753a86-8657-4ec9-87bc-94d79e3aa3f8", "alias" : "create unique user config", "config" : { "require.password.update.after.registration" : "false" } }, { - "id" : "f7a6ed54-0ab8-4f29-9877-960bd65bf394", + "id" : "b92405c6-1646-4cf2-8c8d-0f66026024ed", "alias" : "review profile config", "config" : { "update.profile.on.first.login" : "missing" @@ -5326,4 +5349,4 @@ "clientPolicies" : { "policies" : [ ] } -} +} \ No newline at end of file diff --git a/spiffworkflow-backend/keycloak/test_user_lists/status b/spiffworkflow-backend/keycloak/test_user_lists/status index 5af7736d..c702a9a6 100644 --- a/spiffworkflow-backend/keycloak/test_user_lists/status +++ b/spiffworkflow-backend/keycloak/test_user_lists/status @@ -57,6 +57,7 @@ infra4.sme@status.im,175 infra5.sme@status.im,176 infra6.sme@status.im,212 jakub@status.im +jamescheung@status.im,234 jarrad@status.im lead@status.im,114 legal-a1.sme@status.im,205 From 2939297bfe31de9e58529025d13899ddf08b236c Mon Sep 17 00:00:00 2001 From: burnettk Date: Thu, 23 Mar 2023 14:25:45 -0400 Subject: [PATCH 087/162] lint --- .../keycloak/realm_exports/spiffworkflow-realm.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json index 87d72394..c7781b81 100644 --- a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json +++ b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json @@ -5349,4 +5349,4 @@ 
"clientPolicies" : { "policies" : [ ] } -} \ No newline at end of file +} From e18d13260ddfb4dab62e40b31d5e6fafd2ff14a0 Mon Sep 17 00:00:00 2001 From: jasquat Date: Thu, 23 Mar 2023 14:28:20 -0400 Subject: [PATCH 088/162] fixed editing task data and skipping tasks, moved task and task data methods from process_api_blueprint to tasks_controller, and updated to favor task_guid over task_id in some places --- .../src/spiffworkflow_backend/api.yml | 41 +---- .../routes/process_api_blueprint.py | 85 ----------- .../routes/process_instances_controller.py | 1 + .../routes/tasks_controller.py | 144 ++++++++++++++---- .../src/routes/ProcessInstanceShow.tsx | 48 +++--- 5 files changed, 145 insertions(+), 174 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml index f7fa3f03..6fa28040 100755 --- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml @@ -1590,7 +1590,7 @@ paths: - name: task_guid in: path required: true - description: The guid of the task to show. + description: The unique id of the task. schema: type: string get: @@ -1605,35 +1605,8 @@ paths: application/json: schema: $ref: "#/components/schemas/Task" - - /task-data/{modified_process_model_identifier}/{process_instance_id}/{task_id}: - parameters: - - name: modified_process_model_identifier - in: path - required: true - description: The modified id of an existing process model - schema: - type: string - - name: process_instance_id - in: path - required: true - description: The unique id of an existing process instance. - schema: - type: integer - - name: task_id - in: path - required: true - description: The unique id of the task. - schema: - type: string - - name: task_guid - in: path - required: true - description: The guid of the task to show. - schema: - type: string put: - operationId: spiffworkflow_backend.routes.process_api_blueprint.task_data_update + operationId: spiffworkflow_backend.routes.tasks_controller.task_data_update summary: Update the task data for requested instance and task tags: - Process Instances @@ -1738,7 +1711,7 @@ paths: schema: $ref: "#/components/schemas/Workflow" - /task-complete/{modified_process_model_identifier}/{process_instance_id}/{task_id}: + /task-complete/{modified_process_model_identifier}/{process_instance_id}/{task_guid}: parameters: - name: modified_process_model_identifier in: path @@ -1752,14 +1725,14 @@ paths: description: The unique id of the process instance schema: type: string - - name: task_id + - name: task_guid in: path required: true description: The unique id of the task. schema: type: string post: - operationId: spiffworkflow_backend.routes.process_api_blueprint.manual_complete_task + operationId: spiffworkflow_backend.routes.tasks_controller.manual_complete_task summary: Mark a task complete without executing it tags: - Process Instances @@ -1838,9 +1811,9 @@ paths: schema: $ref: "#/components/schemas/ServiceTask" - /tasks/{process_instance_id}/{task_id}: + /tasks/{process_instance_id}/{task_guid}: parameters: - - name: task_id + - name: task_guid in: path required: true description: The unique id of an existing process group. 
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py index a07f5f49..ac38eff0 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py @@ -16,15 +16,9 @@ from spiffworkflow_backend.exceptions.api_error import ApiError from spiffworkflow_backend.exceptions.process_entity_not_found_error import ( ProcessEntityNotFoundError, ) -from spiffworkflow_backend.models.db import db -from spiffworkflow_backend.models.json_data import JsonDataModel from spiffworkflow_backend.models.principal import PrincipalModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema -from spiffworkflow_backend.models.process_instance import ( - ProcessInstanceTaskDataCannotBeUpdatedError, -) -from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType from spiffworkflow_backend.models.process_instance_file_data import ( ProcessInstanceFileDataModel, ) @@ -38,7 +32,6 @@ from spiffworkflow_backend.services.process_instance_processor import ( ProcessInstanceProcessor, ) from spiffworkflow_backend.services.process_model_service import ProcessModelService -from spiffworkflow_backend.services.task_service import TaskService process_api_blueprint = Blueprint("process_api", __name__) @@ -169,60 +162,6 @@ def github_webhook_receive(body: Dict) -> Response: return Response(json.dumps({"git_pull": result}), status=200, mimetype="application/json") -def task_data_update( - process_instance_id: str, - modified_process_model_identifier: str, - task_id: str, - body: Dict, -) -> Response: - """Update task data.""" - process_instance = ProcessInstanceModel.query.filter(ProcessInstanceModel.id == int(process_instance_id)).first() - if process_instance: - if process_instance.status != "suspended": - raise ProcessInstanceTaskDataCannotBeUpdatedError( - "The process instance needs to be suspended to update the task-data." - f" It is currently: {process_instance.status}" - ) - - task_model = TaskModel.query.filter_by(guid=task_id).first() - if task_model is None: - raise ApiError( - error_code="update_task_data_error", - message=f"Could not find Task: {task_id} in Instance: {process_instance_id}.", - ) - - if "new_task_data" in body: - new_task_data_str: str = body["new_task_data"] - new_task_data_dict = json.loads(new_task_data_str) - json_data_dict = TaskService.update_task_data_on_task_model( - task_model, new_task_data_dict, "json_data_hash" - ) - if json_data_dict is not None: - json_data = JsonDataModel(**json_data_dict) - db.session.add(json_data) - ProcessInstanceProcessor.add_event_to_process_instance( - process_instance, ProcessInstanceEventType.task_data_edited.value, task_guid=task_id - ) - try: - db.session.commit() - except Exception as e: - db.session.rollback() - raise ApiError( - error_code="update_task_data_error", - message=f"Could not update the Instance. 
Original error is {e}", - ) from e - else: - raise ApiError( - error_code="update_task_data_error", - message=f"Could not update task data for Instance: {process_instance_id}, and Task: {task_id}.", - ) - return Response( - json.dumps(ProcessInstanceModelSchema().dump(process_instance)), - status=200, - mimetype="application/json", - ) - - def _get_required_parameter_or_raise(parameter: str, post_body: dict[str, Any]) -> Any: """Get_required_parameter_or_raise.""" return_value = None @@ -263,30 +202,6 @@ def send_bpmn_event( ) -def manual_complete_task( - modified_process_model_identifier: str, - process_instance_id: str, - task_id: str, - body: Dict, -) -> Response: - """Mark a task complete without executing it.""" - execute = body.get("execute", True) - process_instance = ProcessInstanceModel.query.filter(ProcessInstanceModel.id == int(process_instance_id)).first() - if process_instance: - processor = ProcessInstanceProcessor(process_instance) - processor.manual_complete_task(task_id, execute) - else: - raise ApiError( - error_code="complete_task", - message=f"Could not complete Task {task_id} in Instance {process_instance_id}", - ) - return Response( - json.dumps(ProcessInstanceModelSchema().dump(process_instance)), - status=200, - mimetype="application/json", - ) - - def _commit_and_push_to_git(message: str) -> None: """Commit_and_push_to_git.""" if current_app.config["SPIFFWORKFLOW_BACKEND_GIT_COMMIT_ON_SAVE"]: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index bb5cef6c..48a931c2 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -218,6 +218,7 @@ def process_instance_resume( try: processor.lock_process_instance("Web") processor.resume() + processor.do_engine_steps(save=True) except (ProcessInstanceIsNotEnqueuedError, ProcessInstanceIsAlreadyLockedError) as e: ErrorHandlingService().handle_error(processor, e) raise e diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py index 495d22de..c49eda58 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py @@ -33,8 +33,14 @@ from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.group import GroupModel from spiffworkflow_backend.models.human_task import HumanTaskModel from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel +from spiffworkflow_backend.models.json_data import JsonDataModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModel +from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus +from spiffworkflow_backend.models.process_instance import ( + ProcessInstanceTaskDataCannotBeUpdatedError, +) +from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType from spiffworkflow_backend.models.process_model import ProcessModelInfo from spiffworkflow_backend.models.task import Task from spiffworkflow_backend.models.task import TaskModel # noqa: F401 @@ -56,6 +62,7 @@ from spiffworkflow_backend.services.process_instance_service 
import ( ) from spiffworkflow_backend.services.process_model_service import ProcessModelService from spiffworkflow_backend.services.spec_file_service import SpecFileService +from spiffworkflow_backend.services.task_service import TaskService class TaskDataSelectOption(TypedDict): @@ -182,24 +189,85 @@ def task_data_show( return make_response(jsonify(task_model), 200) -def _munge_form_ui_schema_based_on_hidden_fields_in_task_data(task: Task) -> None: - if task.form_ui_schema is None: - task.form_ui_schema = {} +def task_data_update( + process_instance_id: str, + modified_process_model_identifier: str, + task_guid: str, + body: Dict, +) -> Response: + """Update task data.""" + process_instance = ProcessInstanceModel.query.filter(ProcessInstanceModel.id == int(process_instance_id)).first() + if process_instance: + if process_instance.status != "suspended": + raise ProcessInstanceTaskDataCannotBeUpdatedError( + "The process instance needs to be suspended to update the task-data." + f" It is currently: {process_instance.status}" + ) - if task.data and "form_ui_hidden_fields" in task.data: - hidden_fields = task.data["form_ui_hidden_fields"] - for hidden_field in hidden_fields: - hidden_field_parts = hidden_field.split(".") - relevant_depth_of_ui_schema = task.form_ui_schema - for ii, hidden_field_part in enumerate(hidden_field_parts): - if hidden_field_part not in relevant_depth_of_ui_schema: - relevant_depth_of_ui_schema[hidden_field_part] = {} - relevant_depth_of_ui_schema = relevant_depth_of_ui_schema[hidden_field_part] - if len(hidden_field_parts) == ii + 1: - relevant_depth_of_ui_schema["ui:widget"] = "hidden" + task_model = TaskModel.query.filter_by(guid=task_guid).first() + if task_model is None: + raise ApiError( + error_code="update_task_data_error", + message=f"Could not find Task: {task_guid} in Instance: {process_instance_id}.", + ) + + if "new_task_data" in body: + new_task_data_str: str = body["new_task_data"] + new_task_data_dict = json.loads(new_task_data_str) + json_data_dict = TaskService.update_task_data_on_task_model( + task_model, new_task_data_dict, "json_data_hash" + ) + if json_data_dict is not None: + json_data = JsonDataModel(**json_data_dict) + db.session.add(json_data) + ProcessInstanceProcessor.add_event_to_process_instance( + process_instance, ProcessInstanceEventType.task_data_edited.value, task_guid=task_guid + ) + try: + db.session.commit() + except Exception as e: + db.session.rollback() + raise ApiError( + error_code="update_task_data_error", + message=f"Could not update the Instance. 
Original error is {e}", + ) from e + else: + raise ApiError( + error_code="update_task_data_error", + message=f"Could not update task data for Instance: {process_instance_id}, and Task: {task_guid}.", + ) + return Response( + json.dumps(ProcessInstanceModelSchema().dump(process_instance)), + status=200, + mimetype="application/json", + ) -def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response: +def manual_complete_task( + modified_process_model_identifier: str, + process_instance_id: str, + task_guid: str, + body: Dict, +) -> Response: + """Mark a task complete without executing it.""" + execute = body.get("execute", True) + process_instance = ProcessInstanceModel.query.filter(ProcessInstanceModel.id == int(process_instance_id)).first() + if process_instance: + processor = ProcessInstanceProcessor(process_instance) + processor.manual_complete_task(task_guid, execute) + else: + raise ApiError( + error_code="complete_task", + message=f"Could not complete Task {task_guid} in Instance {process_instance_id}", + ) + return Response( + json.dumps(ProcessInstanceModelSchema().dump(process_instance)), + status=200, + mimetype="application/json", + ) + + +def task_show(process_instance_id: int, task_guid: str) -> flask.wrappers.Response: """Task_show.""" process_instance = _find_process_instance_by_id_or_raise(process_instance_id) @@ -214,12 +282,12 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response process_instance.process_model_identifier, ) - _find_human_task_or_raise(process_instance_id, task_id) + _find_human_task_or_raise(process_instance_id, task_guid) form_schema_file_name = "" form_ui_schema_file_name = "" processor = ProcessInstanceProcessor(process_instance) - spiff_task = _get_spiff_task_from_process_instance(task_id, process_instance, processor=processor) + spiff_task = _get_spiff_task_from_process_instance(task_guid, process_instance, processor=processor) extensions = spiff_task.task_spec.extensions if "properties" in extensions: @@ -252,7 +320,8 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response ApiError( error_code="missing_form_file", message=( - f"Cannot find a form file for process_instance_id: {process_instance_id}, task_id: {task_id}" + f"Cannot find a form file for process_instance_id: {process_instance_id}, task_guid:" + f" {task_guid}" ), status_code=400, ) @@ -319,7 +388,7 @@ def process_data_show( def task_submit_shared( process_instance_id: int, - task_id: str, + task_guid: str, body: Dict[str, Any], terminate_loop: bool = False, ) -> flask.wrappers.Response: @@ -336,7 +405,7 @@ def task_submit_shared( ) processor = ProcessInstanceProcessor(process_instance) - spiff_task = _get_spiff_task_from_process_instance(task_id, process_instance, processor=processor) + spiff_task = _get_spiff_task_from_process_instance(task_guid, process_instance, processor=processor) AuthorizationService.assert_user_can_complete_spiff_task(process_instance.id, spiff_task, principal.user) if spiff_task.state != TaskState.READY: @@ -353,7 +422,7 @@ def task_submit_shared( human_task = _find_human_task_or_raise( process_instance_id=process_instance_id, - task_id=task_id, + task_guid=task_guid, only_tasks_that_can_be_completed=True, ) @@ -398,13 +467,13 @@ def task_submit_shared( def task_submit( process_instance_id: int, - task_id: str, + task_guid: str, body: Dict[str, Any], terminate_loop: bool = False, ) -> flask.wrappers.Response: """Task_submit_user_data.""" with 
sentry_sdk.start_span(op="controller_action", description="tasks_controller.task_submit"):
-        return task_submit_shared(process_instance_id, task_id, body, terminate_loop)
+        return task_submit_shared(process_instance_id, task_guid, body, terminate_loop)
 
 
 def _get_tasks(
@@ -559,14 +628,14 @@ def _render_jinja_template(unprocessed_template: str, spiff_task: SpiffTask) ->
 
 
 def _get_spiff_task_from_process_instance(
-    task_id: str,
+    task_guid: str,
     process_instance: ProcessInstanceModel,
     processor: Union[ProcessInstanceProcessor, None] = None,
 ) -> SpiffTask:
     """Get_spiff_task_from_process_instance."""
     if processor is None:
         processor = ProcessInstanceProcessor(process_instance)
-    task_uuid = uuid.UUID(task_id)
+    task_uuid = uuid.UUID(task_guid)
     spiff_task = processor.bpmn_process_instance.get_task(task_uuid)
 
     if spiff_task is None:
@@ -658,15 +727,15 @@ def _get_potential_owner_usernames(assigned_user: AliasedClass) -> Any:
 
 def _find_human_task_or_raise(
     process_instance_id: int,
-    task_id: str,
+    task_guid: str,
     only_tasks_that_can_be_completed: bool = False,
 ) -> HumanTaskModel:
     if only_tasks_that_can_be_completed:
         human_task_query = HumanTaskModel.query.filter_by(
-            process_instance_id=process_instance_id, task_id=task_id, completed=False
+            process_instance_id=process_instance_id, task_id=task_guid, completed=False
         )
     else:
-        human_task_query = HumanTaskModel.query.filter_by(process_instance_id=process_instance_id, task_id=task_id)
+        human_task_query = HumanTaskModel.query.filter_by(process_instance_id=process_instance_id, task_id=task_guid)
 
     human_task: HumanTaskModel = human_task_query.first()
     if human_task is None:
@@ -674,10 +743,27 @@ def _find_human_task_or_raise(
             ApiError(
                 error_code="no_human_task",
                 message=(
-                    f"Cannot find a task to complete for task id '{task_id}' and"
+                    f"Cannot find a task to complete for task id '{task_guid}' and"
                     f" process instance {process_instance_id}."
                 ),
                 status_code=500,
             )
         )
     return human_task
+
+
+def _munge_form_ui_schema_based_on_hidden_fields_in_task_data(task: Task) -> None:
+    if task.form_ui_schema is None:
+        task.form_ui_schema = {}
+
+    if task.data and "form_ui_hidden_fields" in task.data:
+        hidden_fields = task.data["form_ui_hidden_fields"]
+        for hidden_field in hidden_fields:
+            hidden_field_parts = hidden_field.split(".")
+            relevant_depth_of_ui_schema = task.form_ui_schema
+            for ii, hidden_field_part in enumerate(hidden_field_parts):
+                if hidden_field_part not in relevant_depth_of_ui_schema:
+                    relevant_depth_of_ui_schema[hidden_field_part] = {}
+                relevant_depth_of_ui_schema = relevant_depth_of_ui_schema[hidden_field_part]
+                if len(hidden_field_parts) == ii + 1:
+                    relevant_depth_of_ui_schema["ui:widget"] = "hidden"
diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx
index 07cb45fd..308f4bd1 100644
--- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx
+++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx
@@ -234,10 +234,11 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
     return params.to_task_guid;
   };
 
-  const showingLastSpiffStep = () => {
-    return (
-      processInstance && currentToTaskGuid() === processInstance.spiff_step
-    );
+  // right now this just assumes that if to_task_guid was passed in then
+  // this cannot be the active task.
+  // we may need a better way to figure this out.
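For readers following the API rather than the UI, the task list that backs this view comes from the task-info route exercised elsewhere in this series. A minimal Python sketch of the request is below; the base URL, auth header, and identifiers are placeholders, not values taken from these patches.

    # Illustrative client only; the route shape and query parameters come from
    # this patch series, everything else here is an assumption.
    import requests

    BASE_URL = "http://localhost:7000/v1.0"  # assumed local backend
    HEADERS = {"Authorization": "Bearer <access-token>"}  # placeholder auth


    def task_info(modified_model_id, process_instance_id, to_task_guid=None):
        # most_recent_tasks_only collapses repeated task runs to their latest
        # state; passing to_task_guid asks for the instance as it looked when
        # that historical task was active, which is what makes the view above
        # read-only.
        params = {"most_recent_tasks_only": "true"}
        if to_task_guid is not None:
            params["to_task_guid"] = to_task_guid
        url = f"{BASE_URL}/process-instances/{modified_model_id}/{process_instance_id}/task-info"
        response = requests.get(url, params=params, headers=HEADERS)
        response.raise_for_status()
        return response.json()

The helper that follows is then just the client-side mirror of whether that extra parameter was sent.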
+ const showingActiveTask = () => { + return !params.to_task_guid; }; const completionViewLink = (label: any, taskGuid: string) => { @@ -496,7 +497,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { const initializeTaskDataToDisplay = (task: Task | null) => { if ( task && - task.state === 'COMPLETED' && + (task.state === 'COMPLETED' || task.state === 'READY') && ability.can('GET', targetUris.processInstanceTaskDataPath) ) { setShowTaskDataLoading(true); @@ -624,7 +625,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { ability.can('PUT', targetUris.processInstanceTaskDataPath) && isActiveTask(task) && processInstance.status === 'suspended' && - showingLastSpiffStep() + showingActiveTask() ); }; @@ -637,7 +638,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { ability.can('POST', targetUris.processInstanceSendEventPath) && taskTypes.filter((t) => t === task.typename).length > 0 && task.state === 'WAITING' && - showingLastSpiffStep() + showingActiveTask() ); }; @@ -647,18 +648,20 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { processInstance.status === 'suspended' && ability.can('POST', targetUris.processInstanceCompleteTaskPath) && isActiveTask(task) && - showingLastSpiffStep() + showingActiveTask() ); }; - const canResetProcess = (task: Task) => { - return ( - ability.can('POST', targetUris.processInstanceResetPath) && - processInstance && - processInstance.status === 'suspended' && - task.state === 'READY' && - !showingLastSpiffStep() - ); + const canResetProcess = (_task: Task) => { + // disabling this feature for now + return false; + // return ( + // ability.can('POST', targetUris.processInstanceResetPath) && + // processInstance && + // processInstance.status === 'suspended' && + // task.state === 'READY' && + // !showingActiveTask() + // ); }; const getEvents = (task: Task) => { @@ -714,7 +717,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { // taskToUse is copy of taskToDisplay, with taskDataToDisplay in data attribute const taskToUse: Task = { ...taskToDisplay, data: taskDataToDisplay }; HttpService.makeCallToBackend({ - path: `${targetUris.processInstanceTaskDataPath}/${taskToUse.id}`, + path: `${targetUris.processInstanceTaskDataPath}/${taskToUse.guid}`, httpMethod: 'PUT', successCallback: saveTaskDataResult, failureCallback: addError, @@ -976,7 +979,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { ): {taskToUse.state} {taskDisplayButtons(taskToUse)} - {taskToUse.state == 'COMPLETED' ? ( + {taskToUse.state === 'COMPLETED' ? (
{completionViewLink( @@ -1031,15 +1034,8 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { return elements; }; - // right now this just assume if to_task_guid was passed in then - // this cannot be the active task. - // we may need a better way to figure this out. - const showingActiveTask = () => { - return !!params.to_task_guid; - }; - const viewMostRecentStateComponent = () => { - if (!showingActiveTask()) { + if (showingActiveTask()) { return null; } From 8593d54742adc4bae92c628b60261085a07eb27f Mon Sep 17 00:00:00 2001 From: jasquat Date: Thu, 23 Mar 2023 15:16:39 -0400 Subject: [PATCH 089/162] pyl passes w/ burnettk --- .../routes/process_instances_controller.py | 2 -- .../spiffworkflow_backend/services/task_service.py | 1 - .../process_navigation/process_navigation.bpmn | 8 ++++---- .../integration/test_process_api.py | 14 +++++++------- 4 files changed, 11 insertions(+), 14 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index 48a931c2..619aaae1 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -218,7 +218,6 @@ def process_instance_resume( try: processor.lock_process_instance("Web") processor.resume() - processor.do_engine_steps(save=True) except (ProcessInstanceIsNotEnqueuedError, ProcessInstanceIsAlreadyLockedError) as e: ErrorHandlingService().handle_error(processor, e) raise e @@ -673,7 +672,6 @@ def process_instance_task_list( TaskDefinitionModel.properties_json.label("task_definition_properties_json"), # type: ignore TaskModel.guid, TaskModel.state, - TaskModel.properties_json, TaskModel.end_in_seconds, TaskModel.start_in_seconds, ) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index 29a45677..d3cf545c 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -341,7 +341,6 @@ class TaskService: task_model["state"] = state task_model["start_in_seconds"] = None task_model["end_in_seconds"] = None - task_model["properties_json"]["state"] = getattr(TaskState, state) @classmethod def reset_task_model( diff --git a/spiffworkflow-backend/tests/data/process_navigation/process_navigation.bpmn b/spiffworkflow-backend/tests/data/process_navigation/process_navigation.bpmn index 540a0e12..d53c8184 100644 --- a/spiffworkflow-backend/tests/data/process_navigation/process_navigation.bpmn +++ b/spiffworkflow-backend/tests/data/process_navigation/process_navigation.bpmn @@ -27,7 +27,7 @@ - + Flow_1q47ol8 @@ -36,7 +36,7 @@ Flow_1w3n49n - + Flow_1vld4r2 Flow_13ai5vv @@ -44,7 +44,7 @@ "PT1H" - + Click the button. 
@@ -91,7 +91,7 @@ - + diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index 600bcb66..b0f355c8 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -2619,9 +2619,9 @@ class TestProcessApi(BaseTest): headers=self.logged_in_headers(with_super_admin_user), ) assert response.status_code == 200 - end_task = next(task for task in response.json if task["type"] == "End Event") + end_task = next(task for task in response.json if task["bpmn_identifier"] == "Event_174a838") response = client.get( - f"/v1.0/task-data/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/{end_task['task_spiff_step']}", + f"/v1.0/task-data/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/{end_task['guid']}", headers=self.logged_in_headers(with_super_admin_user), ) assert response.status_code == 200 @@ -2688,17 +2688,17 @@ class TestProcessApi(BaseTest): f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/task-info", headers=self.logged_in_headers(with_super_admin_user), ) - assert len(response.json) == 1 - task = response.json[0] + assert len(response.json) == 9 + human_task = next(task for task in response.json if task["bpmn_identifier"] == "manual_task_one") response = client.post( - f"/v1.0/task-complete/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/{task['id']}", + f"/v1.0/task-complete/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/{human_task['guid']}", headers=self.logged_in_headers(with_super_admin_user), content_type="application/json", data=json.dumps({"execute": False}), ) assert response.json["status"] == "suspended" - task_model = TaskModel.query.filter_by(guid=task["id"]).first() + task_model = TaskModel.query.filter_by(guid=human_task["guid"]).first() assert task_model is not None assert task_model.state == "COMPLETED" @@ -2707,7 +2707,7 @@ class TestProcessApi(BaseTest): headers=self.logged_in_headers(with_super_admin_user), ) assert response.status_code == 200 - assert len(response.json) == 1 + assert len(response.json) == 9 def setup_initial_groups_for_move_tests(self, client: FlaskClient, with_super_admin_user: UserModel) -> None: """Setup_initial_groups_for_move_tests.""" From 3fa1320c0c4af5b2913c14c45beeccda73cb7312 Mon Sep 17 00:00:00 2001 From: jasquat Date: Thu, 23 Mar 2023 16:07:25 -0400 Subject: [PATCH 090/162] save timestamps when manually completing a task and some cleanup w/ burnettk --- .../routes/tasks_controller.py | 5 +- .../services/process_instance_processor.py | 8 ++ .../src/routes/ProcessInstanceShow.tsx | 133 +++++++++++------- 3 files changed, 92 insertions(+), 54 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py index c49eda58..0aa0fa7b 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py @@ -218,8 +218,9 @@ def task_data_update( task_model, new_task_data_dict, "json_data_hash" ) if json_data_dict is not None: - json_data = 
JsonDataModel(**json_data_dict) - db.session.add(json_data) + TaskService.insert_or_update_json_data_records({json_data_dict['hash']: json_data_dict}) + # json_data = JsonDataModel(**json_data_dict) + # db.session.add(json_data) ProcessInstanceProcessor.add_event_to_process_instance( process_instance, ProcessInstanceEventType.task_data_edited.value, task_guid=task_guid ) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 535a2be4..5791be7e 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -1232,6 +1232,7 @@ class ProcessInstanceProcessor: def manual_complete_task(self, task_id: str, execute: bool) -> None: """Mark the task complete optionally executing it.""" spiff_tasks_updated = {} + start_in_seconds = time.time() spiff_task = self.bpmn_process_instance.get_task(UUID(task_id)) event_type = ProcessInstanceEventType.task_skipped.value if execute: @@ -1264,6 +1265,8 @@ class ProcessInstanceProcessor: spiff_task.workflow.last_task = spiff_task spiff_tasks_updated[spiff_task.id] = spiff_task + end_in_seconds = time.time() + if isinstance(spiff_task.task_spec, EndEvent): for task in self.bpmn_process_instance.get_tasks(TaskState.DEFINITE_MASK, workflow=spiff_task.workflow): task.complete() @@ -1300,6 +1303,11 @@ class ProcessInstanceProcessor: if bpmn_process_json_data is not None: new_json_data_dicts[bpmn_process_json_data["hash"]] = bpmn_process_json_data + # spiff_task should be the main task we are completing and only it should get the timestamps + if task_model.guid == str(spiff_task.id): + task_model.start_in_seconds = start_in_seconds + task_model.end_in_seconds = end_in_seconds + new_task_models[task_model.guid] = task_model db.session.bulk_save_objects(new_task_models.values()) TaskService.insert_or_update_json_data_records(new_json_data_dicts) diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx index 308f4bd1..26231282 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx @@ -68,6 +68,9 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { const [tasks, setTasks] = useState(null); const [tasksCallHadError, setTasksCallHadError] = useState(false); const [taskToDisplay, setTaskToDisplay] = useState(null); + const [taskToTimeTravelTo, setTaskToTimeTravelTo] = useState( + null + ); const [taskDataToDisplay, setTaskDataToDisplay] = useState(''); const [showTaskDataLoading, setShowTaskDataLoading] = useState(false); @@ -127,45 +130,58 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { } useEffect(() => { - if (permissionsLoaded) { - const processTaskFailure = () => { - setTasksCallHadError(true); - }; - let queryParams = ''; - const processIdentifier = searchParams.get('process_identifier'); - if (processIdentifier) { - queryParams = `?process_identifier=${processIdentifier}`; - } - let apiPath = '/process-instances/for-me'; - if (variant === 'all') { - apiPath = '/process-instances'; - } - HttpService.makeCallToBackend({ - path: `${apiPath}/${modifiedProcessModelId}/${params.process_instance_id}${queryParams}`, - successCallback: setProcessInstance, - }); - let taskParams = '?most_recent_tasks_only=true'; - 
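Before the frontend half of this commit continues, a note on the insert_or_update_json_data_records call that replaced the direct JsonDataModel add in the backend hunk above: task data is stored content-addressed, keyed by a hash of the serialized payload, so identical payloads land in one row and the write is idempotent. A rough sketch of the idea, assuming SHA-256 over canonically serialized JSON; the real scheme lives in TaskService and is not shown in this excerpt.

    # Hypothetical stand-in for building a json_data_dict; the serialization
    # and hashing details are assumptions, not taken from this patch.
    import hashlib
    import json


    def build_json_data_dict(data):
        serialized = json.dumps(data, sort_keys=True)
        digest = hashlib.sha256(serialized.encode("utf-8")).hexdigest()
        return {"hash": digest, "data": data}


    json_data_dict = build_json_data_dict({"my_var": "my_value"})
    # keying the upsert on the hash is what lets a single bulk call replace
    # the old add-then-commit pair without risking duplicate rows
    records = {json_data_dict["hash"]: json_data_dict}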
if (typeof params.to_task_guid !== 'undefined') {
+      taskParams = `${taskParams}&to_task_guid=${params.to_task_guid}`;
+    }
+    const bpmnProcessGuid = searchParams.get('bpmn_process_guid');
+    if (bpmnProcessGuid) {
+      taskParams = `${taskParams}&bpmn_process_guid=${bpmnProcessGuid}`;
+    }
+    let taskPath = '';
+    if (ability.can('GET', taskListPath)) {
+      taskPath = `${taskListPath}${taskParams}`;
+    }
+    if (taskPath) {
+      HttpService.makeCallToBackend({
+        path: taskPath,
+        successCallback: processTasksSuccess,
+        failureCallback: processTaskFailure,
+      });
+    } else {
+      setTasksCallHadError(true);
+    }
+    return undefined;
   }, [
     targetUris,
     params,
@@ -231,14 +247,17 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
   };
 
   const currentToTaskGuid = () => {
-    return params.to_task_guid;
+    if (taskToTimeTravelTo) {
+      return taskToTimeTravelTo.guid;
+    }
+    return null;
   };
 
-  // right now this just assumes that if to_task_guid was passed in then
+  // right now this just assumes that if taskToTimeTravelTo was passed in then
   // this cannot be the active task.
   // we may need a better way to figure this out.
   const showingActiveTask = () => {
-    return !params.to_task_guid;
+    return !taskToTimeTravelTo;
   };
 
   const completionViewLink = (label: any, taskGuid: string) => {
@@ -983,7 +1002,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
{completionViewLink( - 'View state at task completion', + 'View process instance at the time when this task was active.', taskToUse.guid )} @@ -1035,21 +1054,31 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { }; const viewMostRecentStateComponent = () => { - if (showingActiveTask()) { + if (!taskToTimeTravelTo) { return null; } - + const title = `${taskToTimeTravelTo.id}: ${taskToTimeTravelTo.guid}: ${taskToTimeTravelTo.bpmn_identifier}`; return ( <> - - - View at most recent state - + +

+ Viewing process instance at the time when{' '} + + + {taskToTimeTravelTo.bpmn_name || + taskToTimeTravelTo.bpmn_identifier} + + {' '} + was active.{' '} + + View current process instance state. + +


From af97fee56c01f892bcab766de3af5cf2e2a36b2f Mon Sep 17 00:00:00 2001 From: jasquat Date: Thu, 23 Mar 2023 16:33:30 -0400 Subject: [PATCH 091/162] removed spiff step details w/ burnettk --- .../{4255f548bfb4_.py => 0b5dd14bfbac_.py} | 25 +--- .../src/spiffworkflow_backend/api.yml | 2 +- .../load_database_models.py | 3 - .../models/process_instance.py | 6 +- .../models/spiff_step_details.py | 37 ----- .../src/spiffworkflow_backend/models/task.py | 3 - .../routes/process_instances_controller.py | 2 - .../routes/script_unit_tests_controller.py | 1 - .../routes/tasks_controller.py | 3 +- .../delete_process_instances_with_criteria.py | 9 -- .../services/logging_service.py | 23 ---- .../services/process_instance_processor.py | 126 ------------------ .../services/process_instance_service.py | 2 - .../services/workflow_execution_service.py | 58 -------- .../components/ProcessInstanceListTable.tsx | 1 - spiffworkflow-frontend/src/interfaces.ts | 5 - .../src/routes/ProcessInstanceShow.tsx | 6 +- 17 files changed, 12 insertions(+), 300 deletions(-) rename spiffworkflow-backend/migrations/versions/{4255f548bfb4_.py => 0b5dd14bfbac_.py} (96%) delete mode 100644 spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py diff --git a/spiffworkflow-backend/migrations/versions/4255f548bfb4_.py b/spiffworkflow-backend/migrations/versions/0b5dd14bfbac_.py similarity index 96% rename from spiffworkflow-backend/migrations/versions/4255f548bfb4_.py rename to spiffworkflow-backend/migrations/versions/0b5dd14bfbac_.py index a66c074b..d2ef7c10 100644 --- a/spiffworkflow-backend/migrations/versions/4255f548bfb4_.py +++ b/spiffworkflow-backend/migrations/versions/0b5dd14bfbac_.py @@ -1,8 +1,8 @@ """empty message -Revision ID: 4255f548bfb4 +Revision ID: 0b5dd14bfbac Revises: -Create Date: 2023-03-20 13:00:28.655387 +Create Date: 2023-03-23 16:25:33.288500 """ from alembic import op @@ -10,7 +10,7 @@ import sqlalchemy as sa from sqlalchemy.dialects import mysql # revision identifiers, used by Alembic. 
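Since this backend keeps its migrations squashed into a single base revision, dropping the spiff-step artifacts appears below as a regenerated revision id rather than as a new migration file. For contrast only, an incremental Alembic migration for the same schema change might look roughly like this; the revision ids are placeholders and this file is not part of the series.

    # Hypothetical incremental migration, shown for comparison. Table, index,
    # and column names are taken from the regenerated migration below.
    import sqlalchemy as sa
    from alembic import op

    revision = "aaaaaaaaaaaa"  # placeholder
    down_revision = "4255f548bfb4"
    branch_labels = None
    depends_on = None


    def upgrade():
        op.drop_index(
            "ix_spiff_step_details_process_instance_id",
            table_name="spiff_step_details",
        )
        op.drop_table("spiff_step_details")
        op.drop_column("process_instance", "spiff_step")


    def downgrade():
        op.add_column(
            "process_instance", sa.Column("spiff_step", sa.Integer(), nullable=True)
        )
        # recreating the spiff_step_details table is omitted for brevity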
-revision = '4255f548bfb4' +revision = '0b5dd14bfbac' down_revision = None branch_labels = None depends_on = None @@ -251,7 +251,6 @@ def upgrade(): sa.Column('status', sa.String(length=50), nullable=True), sa.Column('bpmn_version_control_type', sa.String(length=50), nullable=True), sa.Column('bpmn_version_control_identifier', sa.String(length=255), nullable=True), - sa.Column('spiff_step', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['bpmn_process_definition_id'], ['bpmn_process_definition.id'], ), sa.ForeignKeyConstraint(['bpmn_process_id'], ['bpmn_process.id'], ), sa.ForeignKeyConstraint(['process_initiator_id'], ['user.id'], ), @@ -347,22 +346,6 @@ def upgrade(): op.create_index(op.f('ix_process_instance_queue_locked_at_in_seconds'), 'process_instance_queue', ['locked_at_in_seconds'], unique=False) op.create_index(op.f('ix_process_instance_queue_locked_by'), 'process_instance_queue', ['locked_by'], unique=False) op.create_index(op.f('ix_process_instance_queue_status'), 'process_instance_queue', ['status'], unique=False) - op.create_table('spiff_step_details', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('process_instance_id', sa.Integer(), nullable=False), - sa.Column('spiff_step', sa.Integer(), nullable=False), - sa.Column('task_json', sa.JSON(), nullable=False), - sa.Column('task_id', sa.String(length=50), nullable=False), - sa.Column('task_state', sa.String(length=50), nullable=False), - sa.Column('bpmn_task_identifier', sa.String(length=255), nullable=False), - sa.Column('delta_json', sa.JSON(), nullable=True), - sa.Column('start_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=False), - sa.Column('end_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True), - sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('process_instance_id', 'spiff_step', name='process_instance_id_spiff_step') - ) - op.create_index(op.f('ix_spiff_step_details_process_instance_id'), 'spiff_step_details', ['process_instance_id'], unique=False) op.create_table('task', sa.Column('id', sa.Integer(), nullable=False), sa.Column('guid', sa.String(length=36), nullable=False), @@ -468,8 +451,6 @@ def downgrade(): op.drop_index(op.f('ix_task_json_data_hash'), table_name='task') op.drop_index(op.f('ix_task_bpmn_process_id'), table_name='task') op.drop_table('task') - op.drop_index(op.f('ix_spiff_step_details_process_instance_id'), table_name='spiff_step_details') - op.drop_table('spiff_step_details') op.drop_index(op.f('ix_process_instance_queue_status'), table_name='process_instance_queue') op.drop_index(op.f('ix_process_instance_queue_locked_by'), table_name='process_instance_queue') op.drop_index(op.f('ix_process_instance_queue_locked_at_in_seconds'), table_name='process_instance_queue') diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml index 6fa28040..a4d8156c 100755 --- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml @@ -1595,7 +1595,7 @@ paths: type: string get: operationId: spiffworkflow_backend.routes.tasks_controller.task_data_show - summary: Get task data for a single task in a spiff step. + summary: Get task data for a single task. 
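To make the reworked route above concrete: tasks are now addressed by guid rather than by spiff step, on both the read and the write side. A short sketch of how a client might exercise the task-data endpoints after this change; the paths follow api.yml and the tests in this series, while the host, identifiers, and payload are invented for illustration, and auth is omitted.

    # Sketch only; requires a running backend, and the PUT succeeds only when
    # the instance is suspended, per the task_data_update controller earlier
    # in the series.
    import json

    import requests

    BASE_URL = "http://localhost:7000/v1.0"  # placeholder
    MODEL = "misc:category:process-model"  # placeholder modified model id
    INSTANCE_ID = 42  # placeholder
    TASK_GUID = "00000000-0000-0000-0000-000000000000"  # placeholder

    # read the data for a single task, by guid
    task_data = requests.get(
        f"{BASE_URL}/task-data/{MODEL}/{INSTANCE_ID}/{TASK_GUID}"
    ).json()

    # update it; note that new_task_data is a JSON string inside the JSON body
    requests.put(
        f"{BASE_URL}/task-data/{MODEL}/{INSTANCE_ID}/{TASK_GUID}",
        json={"new_task_data": json.dumps({"my_var": "new_value"})},
    )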
tags: - Process Instances responses: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/load_database_models.py b/spiffworkflow-backend/src/spiffworkflow_backend/load_database_models.py index 52e0c573..5e78b4d3 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/load_database_models.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/load_database_models.py @@ -41,9 +41,6 @@ from spiffworkflow_backend.models.process_instance_report import ( ) # noqa: F401 from spiffworkflow_backend.models.refresh_token import RefreshTokenModel # noqa: F401 from spiffworkflow_backend.models.secret_model import SecretModel # noqa: F401 -from spiffworkflow_backend.models.spiff_step_details import ( - SpiffStepDetailsModel, -) # noqa: F401 from spiffworkflow_backend.models.user import UserModel # noqa: F401 from spiffworkflow_backend.models.group import GroupModel # noqa: F401 from spiffworkflow_backend.models.process_instance_metadata import ( diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py index f9824f02..3fb8b439 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py @@ -87,6 +87,10 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel): "ProcessInstanceMetadataModel", cascade="delete", ) # type: ignore + process_instance_queue = relationship( + "ProcessInstanceQueueModel", + cascade="delete", + ) # type: ignore start_in_seconds: int | None = db.Column(db.Integer, index=True) end_in_seconds: int | None = db.Column(db.Integer, index=True) @@ -96,7 +100,6 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel): bpmn_version_control_type: str = db.Column(db.String(50)) bpmn_version_control_identifier: str = db.Column(db.String(255)) - spiff_step: int = db.Column(db.Integer) bpmn_xml_file_contents: str | None = None process_model_with_diagram_identifier: str | None = None @@ -117,7 +120,6 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel): "bpmn_xml_file_contents": self.bpmn_xml_file_contents, "bpmn_version_control_identifier": self.bpmn_version_control_identifier, "bpmn_version_control_type": self.bpmn_version_control_type, - "spiff_step": self.spiff_step, "process_initiator_username": self.process_initiator.username, } diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py deleted file mode 100644 index 58d34095..00000000 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py +++ /dev/null @@ -1,37 +0,0 @@ -"""Spiff_step_details.""" -from dataclasses import dataclass -from typing import Union - -from sqlalchemy import ForeignKey -from sqlalchemy import UniqueConstraint -from sqlalchemy.orm import deferred - -from spiffworkflow_backend.models.db import db -from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel -from spiffworkflow_backend.models.process_instance import ProcessInstanceModel - - -@dataclass -class SpiffStepDetailsModel(SpiffworkflowBaseDBModel): - """SpiffStepDetailsModel.""" - - __tablename__ = "spiff_step_details" - __table_args__ = (UniqueConstraint("process_instance_id", "spiff_step", name="process_instance_id_spiff_step"),) - - id: int = db.Column(db.Integer, primary_key=True) - process_instance_id: int = db.Column( - ForeignKey(ProcessInstanceModel.id), nullable=False, index=True # 
type: ignore - ) - spiff_step: int = db.Column(db.Integer, nullable=False) - task_json: dict = deferred(db.Column(db.JSON, nullable=False)) # type: ignore - task_id: str = db.Column(db.String(50), nullable=False) - task_state: str = db.Column(db.String(50), nullable=False) - bpmn_task_identifier: str = db.Column(db.String(255), nullable=False) - delta_json: list = deferred(db.Column(db.JSON)) # type: ignore - - start_in_seconds: float = db.Column(db.DECIMAL(17, 6), nullable=False) - - # to fix mypy in 3.9 - not sure why syntax like: - # float | None - # works in other dataclass db models - end_in_seconds: Union[float, None] = db.Column(db.DECIMAL(17, 6)) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py index dbdd429e..a1edd259 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py @@ -108,7 +108,6 @@ class Task: event_definition: Union[dict[str, Any], None] = None, call_activity_process_identifier: Optional[str] = None, calling_subprocess_task_id: Optional[str] = None, - task_spiff_step: Optional[int] = None, ): """__init__.""" self.id = id @@ -123,7 +122,6 @@ class Task: self.event_definition = event_definition self.call_activity_process_identifier = call_activity_process_identifier self.calling_subprocess_task_id = calling_subprocess_task_id - self.task_spiff_step = task_spiff_step self.data = data if self.data is None: @@ -181,7 +179,6 @@ class Task: "event_definition": self.event_definition, "call_activity_process_identifier": self.call_activity_process_identifier, "calling_subprocess_task_id": self.calling_subprocess_task_id, - "task_spiff_step": self.task_spiff_step, } @classmethod diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index 619aaae1..758a48d9 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -41,7 +41,6 @@ from spiffworkflow_backend.models.process_instance_report import ( from spiffworkflow_backend.models.process_model import ProcessModelInfo from spiffworkflow_backend.models.spec_reference import SpecReferenceCache from spiffworkflow_backend.models.spec_reference import SpecReferenceNotFoundError -from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel from spiffworkflow_backend.models.task import TaskModel from spiffworkflow_backend.models.task_definition import TaskDefinitionModel from spiffworkflow_backend.models.user import UserModel @@ -448,7 +447,6 @@ def process_instance_delete( # (Pdb) db.session.delete # > - db.session.query(SpiffStepDetailsModel).filter_by(process_instance_id=process_instance.id).delete() db.session.query(ProcessInstanceQueueModel).filter_by(process_instance_id=process_instance.id).delete() db.session.delete(process_instance) db.session.commit() diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/script_unit_tests_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/script_unit_tests_controller.py index 303dd94a..3d7ab5af 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/script_unit_tests_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/script_unit_tests_controller.py @@ -102,7 
+102,6 @@ def script_unit_test_run( """Script_unit_test_run.""" # FIXME: We should probably clear this somewhere else but this works current_app.config["THREAD_LOCAL_DATA"].process_instance_id = None - current_app.config["THREAD_LOCAL_DATA"].spiff_step = None python_script = _get_required_parameter_or_raise("python_script", body) input_json = _get_required_parameter_or_raise("input_json", body) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py index 0aa0fa7b..50a4402a 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py @@ -33,7 +33,6 @@ from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.group import GroupModel from spiffworkflow_backend.models.human_task import HumanTaskModel from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel -from spiffworkflow_backend.models.json_data import JsonDataModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus @@ -218,7 +217,7 @@ def task_data_update( task_model, new_task_data_dict, "json_data_hash" ) if json_data_dict is not None: - TaskService.insert_or_update_json_data_records({json_data_dict['hash']: json_data_dict}) + TaskService.insert_or_update_json_data_records({json_data_dict["hash"]: json_data_dict}) # json_data = JsonDataModel(**json_data_dict) # db.session.add(json_data) ProcessInstanceProcessor.add_event_to_process_instance( diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/delete_process_instances_with_criteria.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/delete_process_instances_with_criteria.py index a650cb48..f599d799 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/delete_process_instances_with_criteria.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/delete_process_instances_with_criteria.py @@ -9,7 +9,6 @@ from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.script_attributes_context import ( ScriptAttributesContext, ) -from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel from spiffworkflow_backend.scripts.script import Script @@ -43,14 +42,6 @@ class DeleteProcessInstancesWithCriteria(Script): rows_affected = len(results) if rows_affected > 0: - ids_to_delete = list(map(lambda r: r.id, results)) # type: ignore - - step_details = SpiffStepDetailsModel.query.filter( - SpiffStepDetailsModel.process_instance_id.in_(ids_to_delete) # type: ignore - ).all() - - for deletion in step_details: - db.session.delete(deletion) for deletion in results: db.session.delete(deletion) db.session.commit() diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py index 94f3a67f..b96f98e5 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py @@ -6,7 +6,6 @@ import sys from typing import Any from typing import Optional -from flask import g from flask.app import Flask @@ -88,28 +87,6 @@ class 
JsonFormatter(logging.Formatter): return json.dumps(message_dict, default=str) -class SpiffFilter(logging.Filter): - """SpiffFilter.""" - - def __init__(self, app: Flask): - """__init__.""" - self.app = app - super().__init__() - - def filter(self, record: logging.LogRecord) -> bool: - """Filter.""" - tld = self.app.config["THREAD_LOCAL_DATA"] - process_instance_id = "" - if hasattr(tld, "process_instance_id"): - process_instance_id = tld.process_instance_id - setattr(record, "process_instance_id", process_instance_id) # noqa: B010 - if hasattr(tld, "spiff_step"): - setattr(record, "spiff_step", tld.spiff_step) # noqa: 8010 - if hasattr(g, "user") and g.user: - setattr(record, "current_user_id", g.user.id) # noqa: B010 - return True - - def setup_logger(app: Flask) -> None: """Setup_logger.""" upper_log_level_string = app.config["SPIFFWORKFLOW_BACKEND_LOG_LEVEL"].upper() diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 5791be7e..722baa0d 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -39,7 +39,6 @@ from SpiffWorkflow.bpmn.serializer.task_spec import ( # type: ignore from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer # type: ignore from SpiffWorkflow.bpmn.specs.BpmnProcessSpec import BpmnProcessSpec # type: ignore from SpiffWorkflow.bpmn.specs.events.EndEvent import EndEvent # type: ignore -from SpiffWorkflow.bpmn.specs.events.event_definitions import CancelEventDefinition # type: ignore from SpiffWorkflow.bpmn.specs.events.StartEvent import StartEvent # type: ignore from SpiffWorkflow.bpmn.specs.SubWorkflowTask import SubWorkflowTask # type: ignore from SpiffWorkflow.bpmn.workflow import BpmnWorkflow # type: ignore @@ -84,7 +83,6 @@ from spiffworkflow_backend.models.script_attributes_context import ( ScriptAttributesContext, ) from spiffworkflow_backend.models.spec_reference import SpecReferenceCache -from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel from spiffworkflow_backend.models.task import TaskModel from spiffworkflow_backend.models.task import TaskNotFoundError from spiffworkflow_backend.models.task_definition import TaskDefinitionModel @@ -92,9 +90,6 @@ from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.scripts.script import Script from spiffworkflow_backend.services.custom_parser import MyCustomParser from spiffworkflow_backend.services.file_system_service import FileSystemService -from spiffworkflow_backend.services.process_instance_lock_service import ( - ProcessInstanceLockService, -) from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceIsAlreadyLockedError from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceQueueService from spiffworkflow_backend.services.process_model_service import ProcessModelService @@ -105,9 +100,6 @@ from spiffworkflow_backend.services.user_service import UserService from spiffworkflow_backend.services.workflow_execution_service import ( execution_strategy_named, ) -from spiffworkflow_backend.services.workflow_execution_service import ( - StepDetailLoggingDelegate, -) from spiffworkflow_backend.services.workflow_execution_service import ( TaskModelSavingDelegate, ) @@ -151,10 +143,6 @@ class 
MissingProcessInfoError(Exception): """MissingProcessInfoError.""" -class SpiffStepDetailIsMissingError(Exception): - pass - - class BoxedTaskDataBasedScriptEngineEnvironment(BoxedTaskDataEnvironment): # type: ignore def __init__(self, environment_globals: Dict[str, Any]): """BoxedTaskDataBasedScriptEngineEnvironment.""" @@ -433,7 +421,6 @@ class ProcessInstanceProcessor: """Create a Workflow Processor based on the serialized information available in the process_instance model.""" tld = current_app.config["THREAD_LOCAL_DATA"] tld.process_instance_id = process_instance_model.id - tld.spiff_step = process_instance_model.spiff_step # we want this to be the fully qualified path to the process model including all group subcomponents current_app.config["THREAD_LOCAL_DATA"].process_model_identifier = ( @@ -814,37 +801,6 @@ class ProcessInstanceProcessor: "lane_assignment_id": lane_assignment_id, } - def spiff_step_details_mapping( - self, - spiff_task: Optional[SpiffTask] = None, - start_in_seconds: Optional[float] = None, - end_in_seconds: Optional[float] = None, - ) -> dict: - """SaveSpiffStepDetails.""" - if spiff_task is None: - # TODO: safer to pass in task vs use last task? - spiff_task = self.bpmn_process_instance.last_task - - if spiff_task is None: - return {} - - # it's only None when we're starting a human task (it's not complete yet) - if start_in_seconds is None: - start_in_seconds = time.time() - - task_json = self.get_task_dict_from_spiff_task(spiff_task) - - return { - "process_instance_id": self.process_instance_model.id, - "spiff_step": self.process_instance_model.spiff_step or 1, - "task_json": task_json, - "task_id": str(spiff_task.id), - "task_state": spiff_task.get_state_name(), - "bpmn_task_identifier": spiff_task.task_spec.name, - "start_in_seconds": start_in_seconds, - "end_in_seconds": end_in_seconds, - } - def extract_metadata(self, process_model_info: ProcessModelInfo) -> None: """Extract_metadata.""" metadata_extraction_paths = process_model_info.metadata_extraction_paths @@ -1182,14 +1138,7 @@ class ProcessInstanceProcessor: human_task_user = HumanTaskUserModel(user_id=potential_owner_id, human_task=human_task) db.session.add(human_task_user) - self.increment_spiff_step() - spiff_step_detail_mapping = self.spiff_step_details_mapping( - spiff_task=ready_or_waiting_task, start_in_seconds=time.time() - ) - spiff_step_detail = SpiffStepDetailsModel(**spiff_step_detail_mapping) - db.session.add(spiff_step_detail) db.session.commit() - # self.log_spiff_step_details(spiff_step_detail_mapping) if len(human_tasks) > 0: for at in human_tasks: @@ -1220,15 +1169,6 @@ class ProcessInstanceProcessor: # TODO: do_engine_steps without a lock self.do_engine_steps(save=True) - def add_step(self, step: Union[dict, None] = None) -> None: - """Add a spiff step.""" - if step is None: - step = self.spiff_step_details_mapping() - spiff_step_detail = SpiffStepDetailsModel(**step) - db.session.add(spiff_step_detail) - db.session.commit() - # self.log_spiff_step_details(step) - def manual_complete_task(self, task_id: str, execute: bool) -> None: """Mark the task complete optionally executing it.""" spiff_tasks_updated = {} @@ -1279,9 +1219,6 @@ class ProcessInstanceProcessor: task.complete() spiff_tasks_updated[task.id] = task - self.increment_spiff_step() - self.add_step() - for updated_spiff_task in spiff_tasks_updated.values(): bpmn_process, task_model, new_task_models, new_json_data_dicts = ( TaskService.find_or_create_task_model_from_spiff_task( @@ -1666,31 +1603,15 @@ class 
ProcessInstanceProcessor: db.session.add(message_instance) db.session.commit() - def increment_spiff_step(self) -> None: - """Spiff_step++.""" - spiff_step = self.process_instance_model.spiff_step or 0 - spiff_step += 1 - self.process_instance_model.spiff_step = spiff_step - current_app.config["THREAD_LOCAL_DATA"].spiff_step = spiff_step - db.session.add(self.process_instance_model) - def do_engine_steps( self, exit_at: None = None, save: bool = False, execution_strategy_name: Optional[str] = None, ) -> None: - # NOTE: To avoid saving spiff step details, just comment out this function and the step_delegate and - # set the TaskModelSavingDelegate's secondary_engine_step_delegate to None. - def spiff_step_details_mapping_builder(task: SpiffTask, start: float, end: float) -> dict: - self._script_engine.environment.revise_state_with_task_data(task) - return self.spiff_step_details_mapping(task, start, end) - self._add_bpmn_process_definitions() - step_delegate = StepDetailLoggingDelegate(self.increment_spiff_step, spiff_step_details_mapping_builder) task_model_delegate = TaskModelSavingDelegate( - secondary_engine_step_delegate=step_delegate, serializer=self._serializer, process_instance=self.process_instance_model, bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, @@ -1718,31 +1639,6 @@ class ProcessInstanceProcessor: ): self._script_engine.failing_spiff_task = None - # log the spiff step details so we know what is processing the process - # instance when a human task has a timer event. - def log_spiff_step_details(self, step_details: Any) -> None: - if ProcessInstanceLockService.has_lock(self.process_instance_model.id): - locked_by = ProcessInstanceLockService.locked_by() - message = f"ADDING SPIFF BULK STEP DETAILS: {locked_by}: {step_details}" - current_app.logger.debug(message) - - def cancel_notify(self) -> None: - """Cancel_notify.""" - self.__cancel_notify(self.bpmn_process_instance) - - @staticmethod - def __cancel_notify(bpmn_process_instance: BpmnWorkflow) -> None: - """__cancel_notify.""" - try: - # A little hackly, but make the bpmn_process_instance catch a cancel event. - bpmn_process_instance.signal("cancel") # generate a cancel signal. 
- bpmn_process_instance.catch(CancelEventDefinition()) - # Due to this being static, can't save granular step details in this case - # TODO: do_engine_steps without a lock - bpmn_process_instance.do_engine_steps() - except WorkflowTaskException as we: - raise ApiError.from_workflow_exception("task_error", str(we), we) from we - @classmethod def get_tasks_with_data(cls, bpmn_process_instance: BpmnWorkflow) -> List[SpiffTask]: return [task for task in bpmn_process_instance.get_tasks(TaskState.FINISHED_MASK) if len(task.data) > 0] @@ -1891,28 +1787,6 @@ class ProcessInstanceProcessor: human_task.task_status = spiff_task.get_state_name() db.session.add(human_task) - # FIXME: remove when we switch over to using tasks only - details_model = ( - SpiffStepDetailsModel.query.filter_by( - process_instance_id=self.process_instance_model.id, - task_id=str(spiff_task.id), - task_state="READY", - ) - .order_by(SpiffStepDetailsModel.id.desc()) # type: ignore - .first() - ) - if details_model is None: - raise SpiffStepDetailIsMissingError( - "Cannot find a ready spiff_step_detail entry for process instance" - f" {self.process_instance_model.id} and task_id is {spiff_task.id}" - ) - - details_model.task_state = spiff_task.get_state_name() - details_model.end_in_seconds = time.time() - details_model.task_json = self.get_task_dict_from_spiff_task(spiff_task) - db.session.add(details_model) - # ####### - json_data_dict_list = TaskService.update_task_model(task_model, spiff_task, self._serializer) for json_data_dict in json_data_dict_list: if json_data_dict is not None: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py index 4daabd58..ed2ea918 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py @@ -404,7 +404,6 @@ class ProcessInstanceService: spiff_task: SpiffTask, add_docs_and_forms: bool = False, calling_subprocess_task_id: Optional[str] = None, - task_spiff_step: Optional[int] = None, ) -> Task: """Spiff_task_to_api_task.""" task_type = spiff_task.task_spec.spec_type @@ -443,7 +442,6 @@ class ProcessInstanceService: event_definition=serialized_task_spec.get("event_definition"), call_activity_process_identifier=call_activity_process_identifier, calling_subprocess_task_id=calling_subprocess_task_id, - task_spiff_step=task_spiff_step, ) return task diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py index b8983f1d..4d933418 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py @@ -1,7 +1,6 @@ import logging import time from typing import Callable -from typing import List from typing import Optional from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer # type: ignore @@ -19,7 +18,6 @@ from spiffworkflow_backend.models.message_instance_correlation import ( from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventModel from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType -from 
spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel from spiffworkflow_backend.models.task import TaskModel # noqa: F401 from spiffworkflow_backend.services.assertion_service import safe_assertion from spiffworkflow_backend.services.process_instance_lock_service import ( @@ -45,10 +43,6 @@ class EngineStepDelegate: pass -SpiffStepIncrementer = Callable[[], None] -SpiffStepDetailsMappingBuilder = Callable[[SpiffTask, float, float], dict] - - class TaskModelSavingDelegate(EngineStepDelegate): """Engine step delegate that takes care of saving a task model to the database. @@ -167,58 +161,6 @@ class TaskModelSavingDelegate(EngineStepDelegate): return task_model -class StepDetailLoggingDelegate(EngineStepDelegate): - """Engine step delegate that takes care of logging spiff step details. - - This separates the concerns of step execution and step logging. - """ - - def __init__( - self, - increment_spiff_step: SpiffStepIncrementer, - spiff_step_details_mapping: SpiffStepDetailsMappingBuilder, - ): - """__init__.""" - self.increment_spiff_step = increment_spiff_step - self.spiff_step_details_mapping = spiff_step_details_mapping - self.step_details: List[dict] = [] - self.current_task_start_in_seconds = 0.0 - self.tasks_to_log = { - "BPMN Task", - "Script Task", - "Service Task", - "Default Start Event", - "Exclusive Gateway", - "Call Activity", - # "End Join", - "End Event", - "Default Throwing Event", - "Subprocess", - "Transactional Subprocess", - } - - def should_log(self, spiff_task: SpiffTask) -> bool: - return spiff_task.task_spec.spec_type in self.tasks_to_log and not spiff_task.task_spec.name.endswith( - ".EndJoin" - ) - - def will_complete_task(self, spiff_task: SpiffTask) -> None: - if self.should_log(spiff_task): - self.current_task_start_in_seconds = time.time() - self.increment_spiff_step() - - def did_complete_task(self, spiff_task: SpiffTask) -> None: - if self.should_log(spiff_task): - self.step_details.append( - self.spiff_step_details_mapping(spiff_task, self.current_task_start_in_seconds, time.time()) - ) - - def save(self, _bpmn_process_instance: BpmnWorkflow, commit: bool = True) -> None: - db.session.bulk_insert_mappings(SpiffStepDetailsModel, self.step_details) - if commit: - db.session.commit() - - class ExecutionStrategy: """Interface of sorts for a concrete execution strategy.""" diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx index eb4f17bf..335e6a89 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -1295,7 +1295,6 @@ export default function ProcessInstanceListTable({ end_in_seconds: 'End Time', status: 'Status', process_initiator_username: 'Started By', - spiff_step: 'SpiffWorkflow Step', }; const getHeaderLabel = (header: string) => { return headerLabels[header] ?? header; diff --git a/spiffworkflow-frontend/src/interfaces.ts b/spiffworkflow-frontend/src/interfaces.ts index 2b1a457d..1d34054d 100644 --- a/spiffworkflow-frontend/src/interfaces.ts +++ b/spiffworkflow-frontend/src/interfaces.ts @@ -53,9 +53,6 @@ export interface Task { task_definition_properties_json: TaskDefinitionPropertiesJson; event_definition?: EventDefinition; - - // TOOD: DELETE THIS! 
- task_spiff_step?: number; } export interface TaskIds { @@ -88,7 +85,6 @@ export interface ProcessInstanceTask { type: string; updated_at_in_seconds: number; - task_spiff_step?: number; potential_owner_usernames?: string; assigned_user_group_identifier?: string; } @@ -132,7 +128,6 @@ export interface ProcessInstance { end_in_seconds: number | null; process_initiator_username: string; bpmn_xml_file_contents?: string; - spiff_step?: number; created_at_in_seconds: number; updated_at_in_seconds: number; bpmn_version_control_identifier: string; diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx index 26231282..feaa4173 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx @@ -286,14 +286,14 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { ); }; - const returnToLastSpiffStep = () => { + const returnToProcessInstance = () => { window.location.href = processInstanceShowPageBaseUrl; }; const resetProcessInstance = () => { HttpService.makeCallToBackend({ path: `${targetUris.processInstanceResetPath}/${currentToTaskGuid()}`, - successCallback: returnToLastSpiffStep, + successCallback: returnToProcessInstance, httpMethod: 'POST', }); }; @@ -763,7 +763,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { HttpService.makeCallToBackend({ path: `/task-complete/${modifiedProcessModelId}/${params.process_instance_id}/${taskToDisplay.guid}`, httpMethod: 'POST', - successCallback: returnToLastSpiffStep, + successCallback: returnToProcessInstance, postBody: { execute }, }); } From 097a35cd4586c09513e0b4e187b72a59741b1131 Mon Sep 17 00:00:00 2001 From: jasquat Date: Thu, 23 Mar 2023 17:34:08 -0400 Subject: [PATCH 092/162] attempted to fix some cypress tests --- spiffworkflow-frontend/cypress/e2e/process_instances.cy.js | 5 ++++- spiffworkflow-frontend/cypress/support/commands.js | 4 ++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/spiffworkflow-frontend/cypress/e2e/process_instances.cy.js b/spiffworkflow-frontend/cypress/e2e/process_instances.cy.js index 64e0418a..b1b87c46 100644 --- a/spiffworkflow-frontend/cypress/e2e/process_instances.cy.js +++ b/spiffworkflow-frontend/cypress/e2e/process_instances.cy.js @@ -162,7 +162,7 @@ describe('process-instances', () => { cy.getBySel('process-instance-log-list-link').click(); cy.getBySel('process-instance-log-detailed').click(); cy.contains('process_model_one'); - cy.contains('State change to COMPLETED'); + cy.contains('task_completed'); cy.basicPaginationTest(); }); @@ -184,9 +184,12 @@ describe('process-instances', () => { cy.getBySel(`process-instance-status-${processStatus}`); // there should really only be one, but in CI there are sometimes more cy.get('div[aria-label="Clear all selected items"]:first').click(); + cy.wait(1000); cy.get('div[aria-label="Clear all selected items"]').should( 'not.exist' ); + // it seems like the state isn't clearing as quickly as the clear label so let's wait + cy.wait(1000); } }); diff --git a/spiffworkflow-frontend/cypress/support/commands.js b/spiffworkflow-frontend/cypress/support/commands.js index 404c9af7..f2d96939 100644 --- a/spiffworkflow-frontend/cypress/support/commands.js +++ b/spiffworkflow-frontend/cypress/support/commands.js @@ -154,6 +154,10 @@ Cypress.Commands.add( .then(($element) => { const oldId = $element.text().trim(); cy.get('.cds--pagination__button--forward').click(); + cy.contains( + 
`[data-qa=${dataQaTagToUseToEnsureTableHasLoaded}]`, oldId ).should('not.exist'); cy.contains(/\b3–4 of \d+/); cy.get('.cds--pagination__button--backward').click(); cy.contains(/\b1–2 of \d+/); From 5813af1a17baa554077ba4a1664e3b57c2fca17b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Soko=C5=82owski?= Date: Thu, 23 Mar 2023 22:54:49 +0100 Subject: [PATCH 093/162] ci: add discord notifications at build success MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Jakub Sokołowski --- Jenkinsfile | 55 +++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 55 insertions(+) diff --git a/Jenkinsfile b/Jenkinsfile index 01819634..268239fe 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -32,6 +32,11 @@ pipeline { description: 'ID of Jenkins credential for Docker registry.', defaultValue: params.DOCKER_CRED_ID ?: 'MISSING' ) + string( + name: 'DISCORD_WEBHOOK_CRED', + description: 'Name of credential with Discord webhook.', + defaultValue: params.DISCORD_WEBHOOK_CRED ?: "", + ) booleanParam( name: 'PUBLISH', description: 'Publish built Docker images.', @@ -61,6 +66,16 @@ image.push(env.DOCKER_TAG) } } } + post { + success { script { + if (params.DISCORD_WEBHOOK_CRED) { + discordNotify( + header: 'SpiffWorkflow Docker image published!', + cred: params.DISCORD_WEBHOOK_CRED, + ) + } + } } + } } } // stages post { @@ -68,3 +83,43 @@ cleanup { cleanWs() } } // post } // pipeline + +def discordNotify(Map args=[:]) { + def opts = [ + header: args.header ?: 'Deployment successful!', + title: args.title ?: "${env.JOB_NAME}#${env.BUILD_NUMBER}", + cred: args.cred ?: null, + ] + def repo = [ + url: GIT_URL.minus('.git'), + branch: GIT_BRANCH.minus('origin/'), + commit: GIT_COMMIT.take(8), + prev: ( + env.GIT_PREVIOUS_SUCCESSFUL_COMMIT ?: env.GIT_PREVIOUS_COMMIT ?: 'master' + ).take(8), + ] + wrap([$class: 'BuildUser']) { + BUILD_USER_ID = env.BUILD_USER_ID + } + withCredentials([ + string( + credentialsId: opts.cred, + variable: 'DISCORD_WEBHOOK', + ), + ]) { + discordSend( + link: env.BUILD_URL, + result: currentBuild.currentResult, + webhookURL: env.DISCORD_WEBHOOK, + title: opts.title, + description: """ + ${opts.header} + Image: [`${params.DOCKER_NAME}:${params.DOCKER_TAG}`](https://hub.docker.com/r/${params.DOCKER_NAME}/tags?name=${params.DOCKER_TAG}) + Branch: [`${repo.branch}`](${repo.url}/commits/${repo.branch}) + Commit: [`${repo.commit}`](${repo.url}/commit/${repo.commit}) + Diff: [`${repo.prev}...${repo.commit}`](${repo.url}/compare/${repo.prev}...${repo.commit}) + By: [`${BUILD_USER_ID}`](${repo.url}/commits?author=${BUILD_USER_ID}) + """, + ) + } +} From 037d287f4fe655597c7ff648e9d30843e0b31e56 Mon Sep 17 00:00:00 2001 From: jasquat Date: Fri, 24 Mar 2023 09:21:23 -0400 Subject: [PATCH 094/162] fixed cypress tests for tasks --- .../cypress/e2e/tasks.cy.js | 25 ++++++++++--------- .../src/components/TaskListTable.tsx | 2 +- 2 files changed, 14 insertions(+), 13 deletions(-) diff --git a/spiffworkflow-frontend/cypress/e2e/tasks.cy.js b/spiffworkflow-frontend/cypress/e2e/tasks.cy.js index 06e59d81..a4b4a4dd 100644 --- a/spiffworkflow-frontend/cypress/e2e/tasks.cy.js +++ b/spiffworkflow-frontend/cypress/e2e/tasks.cy.js @@ -50,18 +50,19 @@ describe('tasks', () => { ); cy.contains('Task: get_user_generated_number_three'); - cy.getBySel('form-nav-form2').click(); - checkFormFieldIsReadOnly( - 'get_user_generated_number_two', - '#root_user_generated_number_2' - ); - cy.getBySel('form-nav-form1').click(); - 
checkFormFieldIsReadOnly( - 'get_user_generated_number_one', - '#root_user_generated_number_1' - ); - - cy.getBySel('form-nav-form3').click(); + // TODO: remove this if we decide to completely kill form navigation + // cy.getBySel('form-nav-form2').click(); + // checkFormFieldIsReadOnly( + // 'get_user_generated_number_two', + // '#root_user_generated_number_2' + // ); + // cy.getBySel('form-nav-form1').click(); + // checkFormFieldIsReadOnly( + // 'get_user_generated_number_one', + // '#root_user_generated_number_1' + // ); + // + // cy.getBySel('form-nav-form3').click(); submitInputIntoFormField( 'get_user_generated_number_three', '#root_user_generated_number_3', diff --git a/spiffworkflow-frontend/src/components/TaskListTable.tsx b/spiffworkflow-frontend/src/components/TaskListTable.tsx index 26577b3b..1951b4cc 100644 --- a/spiffworkflow-frontend/src/components/TaskListTable.tsx +++ b/spiffworkflow-frontend/src/components/TaskListTable.tsx @@ -141,7 +141,7 @@ export default function TaskListTable({ rowElements.push( From 1da8e43066741b71e8f442c0ce1734ed88219627 Mon Sep 17 00:00:00 2001 From: jasquat Date: Fri, 24 Mar 2023 11:10:04 -0400 Subject: [PATCH 095/162] fixed cypress process instance test w/ burnettk --- spiffworkflow-frontend/cypress/e2e/process_instances.cy.js | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/spiffworkflow-frontend/cypress/e2e/process_instances.cy.js b/spiffworkflow-frontend/cypress/e2e/process_instances.cy.js index b1b87c46..aa0c6626 100644 --- a/spiffworkflow-frontend/cypress/e2e/process_instances.cy.js +++ b/spiffworkflow-frontend/cypress/e2e/process_instances.cy.js @@ -182,14 +182,14 @@ describe('process-instances', () => { cy.url().should('include', `status=${processStatus}`); cy.assertAtLeastOneItemInPaginatedResults(); cy.getBySel(`process-instance-status-${processStatus}`); + + // maybe waiting a bit before trying to click makes this work consistently? + cy.wait(1000); // there should really only be one, but in CI there are sometimes more cy.get('div[aria-label="Clear all selected items"]:first').click(); - cy.wait(1000); cy.get('div[aria-label="Clear all selected items"]').should( 'not.exist' ); - // it seems like the state isn't clearing as quickly as the clear label so let's wait - cy.wait(1000); } }); From 30e4faa12f4f79ae9c46ef87d9216cd94a33e3dc Mon Sep 17 00:00:00 2001 From: jasquat Date: Fri, 24 Mar 2023 11:11:44 -0400 Subject: [PATCH 096/162] removed debug comment w/ burnettk --- .../spiffworkflow_backend/unit/test_error_handling_service.py | 1 - 1 file changed, 1 deletion(-) diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling_service.py index d41ae3e9..adbd2240 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling_service.py @@ -91,7 +91,6 @@ class TestErrorHandlingService(BaseTest): # Both send and receive messages should be generated, matched # and considered complete. 
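# (concretely, the query below should return exactly two MessageInstanceModel rows, one per side of the send/receive pair, each with status "completed")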
messages = db.session.query(MessageInstanceModel).all() - # import pdb; pdb.set_trace() assert 2 == len(messages) assert "completed" == messages[0].status assert "completed" == messages[1].status From 042c86f78b5f4517e0d0626d1c1aef7506f7f538 Mon Sep 17 00:00:00 2001 From: burnettk Date: Fri, 24 Mar 2023 11:19:24 -0400 Subject: [PATCH 097/162] treat open differently --- spiffworkflow-frontend/bin/cypress_pilot | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/spiffworkflow-frontend/bin/cypress_pilot b/spiffworkflow-frontend/bin/cypress_pilot index ecf50455..89058454 100755 --- a/spiffworkflow-frontend/bin/cypress_pilot +++ b/spiffworkflow-frontend/bin/cypress_pilot @@ -56,6 +56,9 @@ for attempt in $(seq 1 "$ATTEMPTS" ); do formatted_end_time=$(date "-d@${end_time}" +"%Y-%m-%dT%H-%M-%S") fi - echo "${success},$(( end_time - start_time )),${formatted_start_time},${formatted_end_time},${frontend_url}" >>"$cypress_run_file" + if [[ "$command" != "open" ]]; then + echo "Recording stats to ${cypress_run_file}" + echo "${success},$(( end_time - start_time )),${formatted_start_time},${formatted_end_time},${frontend_url}" >>"$cypress_run_file" + fi done echo "Recorded stats to ${cypress_run_file}" From 199cf05960b61af0252ff2d4d6c8d82e98cdd325 Mon Sep 17 00:00:00 2001 From: jasquat Date: Fri, 24 Mar 2023 12:09:27 -0400 Subject: [PATCH 098/162] couple fixes for running a process model from script w/ burnettk --- spiffworkflow-backend/bin/run_process_model_with_api | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/spiffworkflow-backend/bin/run_process_model_with_api b/spiffworkflow-backend/bin/run_process_model_with_api index c62e43ad..a0ab07bc 100755 --- a/spiffworkflow-backend/bin/run_process_model_with_api +++ b/spiffworkflow-backend/bin/run_process_model_with_api @@ -30,7 +30,7 @@ modified_process_model_identifier=$(tr '/' ':' <<<"$process_model_identifier") function check_result_for_error() { local result="$1" error_code=$(jq '.error_code' <<<"$result") - if [[ -n "$error_code" ]]; then + if [[ -n "$error_code" && "$error_code" != "null" ]]; then >&2 echo "ERROR: Failed to run process instance. Received error: $result" exit 1 fi @@ -58,7 +58,7 @@ function process_next_task() { access_token=$("${script_dir}/get_token" "$username" "$password" "$realm_name") curl --silent -X POST "${BACKEND_BASE_URL}/v1.0/login_with_access_token?access_token=${access_token}" -H "Authorization: Bearer $access_token" >/dev/null result=$(curl --silent -X POST "${BACKEND_BASE_URL}/v1.0/process-instances/${modified_process_model_identifier}" -H "Authorization: Bearer $access_token") -process_instance_id=$(jq '.id' <<<"$result") +process_instance_id=$(jq -r '.id' <<<"$result") if ! grep -qE '^[0-9]+$' <<<"$process_instance_id"; then >&2 echo "ERROR: Did not receive valid process instance id when instantiating process model. 
result was ${result}" exit 1 From ba067b320d7e43b1aecd83dcd60e26e1177bda7a Mon Sep 17 00:00:00 2001 From: burnettk Date: Fri, 24 Mar 2023 17:54:37 -0400 Subject: [PATCH 099/162] filter report column list by process_model_identifier if any --- .../src/spiffworkflow_backend/api.yml | 9 ++++++++- .../routes/process_instances_controller.py | 12 +++++++++--- .../src/components/ProcessInstanceListTable.tsx | 15 ++++++++++++--- 3 files changed, 29 insertions(+), 7 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml index a4d8156c..06f482bc 100755 --- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml @@ -1251,9 +1251,16 @@ paths: $ref: "#/components/schemas/OkTrue" /process-instances/reports/columns: + parameters: + - name: process_model_identifier + in: query + required: false + description: The process model identifier to filter by + schema: + type: string get: operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_report_column_list - summary: Returns all available columns for a process instance report. + summary: Returns all available columns for a process instance report, including custom metadata tags: - Process Instances responses: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index 758a48d9..aa6b3cc8 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -390,15 +390,21 @@ def process_instance_list( return make_response(jsonify(response_json), 200) -def process_instance_report_column_list() -> flask.wrappers.Response: +def process_instance_report_column_list(process_model_identifier: Optional[str] = None) -> flask.wrappers.Response: """Process_instance_report_column_list.""" table_columns = ProcessInstanceReportService.builtin_column_options() - columns_for_metadata = ( + columns_for_metadata_query = ( db.session.query(ProcessInstanceMetadataModel.key) .order_by(ProcessInstanceMetadataModel.key) .distinct() # type: ignore - .all() ) + if process_model_identifier: + columns_for_metadata_query = columns_for_metadata_query.join(ProcessInstanceModel) + columns_for_metadata_query = columns_for_metadata_query.filter( + ProcessInstanceModel.process_model_identifier == process_model_identifier + ) + + columns_for_metadata = columns_for_metadata_query.all() columns_for_metadata_strings = [ {"Header": i[0], "accessor": i[0], "filterable": True} for i in columns_for_metadata ] diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx index 335e6a89..870dc440 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -140,6 +140,7 @@ export default function ProcessInstanceListTable({ const [endFromTimeInvalid, setEndFromTimeInvalid] = useState(false); const [endToTimeInvalid, setEndToTimeInvalid] = useState(false); const [requiresRefilter, setRequiresRefilter] = useState(false); + const [lastColumnFilter, setLastColumnFilter] = useState(''); const processInstanceListPathPrefix = variant === 'all' @@ -1105,10 +1106,18 @@ export default function 
ProcessInstanceListTable({ return null; } - // get the columns anytime we display the filter options if they are empty - if (availableReportColumns.length < 1) { + let queryParamString = ''; + if (processModelSelection) { + queryParamString += `?process_model_identifier=${processModelSelection.id}`; + } + // get the columns anytime we display the filter options if they are empty. + // and if the columns are not empty, check if the columns are stale + // because we selected a different process model in the filter options. + const columnFilterIsStale = lastColumnFilter !== queryParamString; + if (availableReportColumns.length < 1 || columnFilterIsStale) { + setLastColumnFilter(queryParamString); HttpService.makeCallToBackend({ - path: `/process-instances/reports/columns`, + path: `/process-instances/reports/columns${queryParamString}`, successCallback: setAvailableReportColumns, }); } From 5452d6be5073e0be205d6d1c503e9b26a9ab5788 Mon Sep 17 00:00:00 2001 From: burnettk Date: Fri, 24 Mar 2023 18:11:11 -0400 Subject: [PATCH 100/162] add test for filtering columns and clarify that we are doing unit tests with api, whoops --- .../helpers/base_test.py | 28 ++++++- .../integration/test_logging_service.py | 4 +- .../integration/test_process_api.py | 74 ++++++++++++++----- .../integration/test_secret_service.py | 2 +- .../scripts/test_get_group_members.py | 2 +- .../test_get_last_user_completing_task.py | 2 +- .../scripts/test_get_localtime.py | 2 +- .../test_get_process_initiator_user.py | 2 +- .../test_save_process_instance_metadata.py | 2 +- .../unit/test_message_service.py | 4 +- .../unit/test_permissions.py | 2 +- .../unit/test_process_instance_processor.py | 10 +-- .../unit/test_process_model.py | 22 +----- .../unit/test_process_model_service.py | 2 +- .../unit/test_restricted_script_engine.py | 4 +- .../unit/test_script_unit_test_runner.py | 8 +- 16 files changed, 111 insertions(+), 59 deletions(-) diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/base_test.py b/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/base_test.py index 5f483fdd..6b4d0143 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/base_test.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/base_test.py @@ -78,7 +78,7 @@ class BaseTest: if bpmn_file_location is None: bpmn_file_location = process_model_id - self.create_process_group(client, user, process_group_description, process_group_display_name) + self.create_process_group_with_api(client, user, process_group_description, process_group_display_name) self.create_process_model_with_api( client, @@ -97,6 +97,15 @@ class BaseTest: return process_model_identifier def create_process_group( + self, + process_group_id: str, + display_name: str = "", + ) -> ProcessGroup: + """Create_process_group.""" + process_group = ProcessGroup(id=process_group_id, display_name=display_name, display_order=0, admin=False) + return ProcessModelService.add_process_group(process_group) + + def create_process_group_with_api( self, client: FlaskClient, user: Any, @@ -353,3 +362,20 @@ class BaseTest: def un_modify_modified_process_identifier_for_path_param(self, modified_identifier: str) -> str: """Un_modify_modified_process_model_id.""" return modified_identifier.replace(":", "/") + + def create_process_model_with_metadata(self) -> ProcessModelInfo: + self.create_process_group("test_group", "test_group") + process_model = load_test_spec( + "test_group/hello_world", + process_model_source_directory="nested-task-data-structure", + ) + 
ProcessModelService.update_process_model( + process_model, + { + "metadata_extraction_paths": [ + {"key": "awesome_var", "path": "outer.inner"}, + {"key": "invoice_number", "path": "invoice_number"}, + ] + }, + ) + return process_model diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py index f79a3295..41f30563 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py @@ -25,7 +25,7 @@ class TestLoggingService(BaseTest): with_db_and_bpmn_file_cleanup: None, with_super_admin_user: UserModel, ) -> None: - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") initiator_user = self.find_or_create_user("initiator_user") assert initiator_user.principal is not None AuthorizationService.import_permissions_from_yaml_file() @@ -85,7 +85,7 @@ class TestLoggingService(BaseTest): with_db_and_bpmn_file_cleanup: None, with_super_admin_user: UserModel, ) -> None: - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") initiator_user = self.find_or_create_user("initiator_user") assert initiator_user.principal is not None AuthorizationService.import_permissions_from_yaml_file() diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index b0f355c8..89fda503 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -118,7 +118,7 @@ class TestProcessApi(BaseTest): process_group_id = "test_process_group" process_group_display_name = "Test Process Group" # creates the group directory, and the json file - self.create_process_group(client, with_super_admin_user, process_group_id, process_group_display_name) + self.create_process_group_with_api(client, with_super_admin_user, process_group_id, process_group_display_name) process_model_id = "sample" model_display_name = "Sample" @@ -169,7 +169,7 @@ class TestProcessApi(BaseTest): process_group_description = "Test Process Group" process_model_id = "sample" process_model_identifier = f"{process_group_id}/{process_model_id}" - self.create_process_group(client, with_super_admin_user, process_group_id, process_group_description) + self.create_process_group_with_api(client, with_super_admin_user, process_group_id, process_group_description) text = "Create a Bug Tracker process model " text += "with a Bug Details form that collects summary, description, and priority" @@ -237,7 +237,9 @@ class TestProcessApi(BaseTest): process_model_identifier = f"{process_group_id}/{process_model_id}" initial_primary_process_id = "sample" terminal_primary_process_id = "new_process_id" - self.create_process_group(client=client, user=with_super_admin_user, process_group_id=process_group_id) + self.create_process_group_with_api( + client=client, user=with_super_admin_user, process_group_id=process_group_id + ) bpmn_file_name = f"{process_model_id}.bpmn" bpmn_file_source_directory = process_model_id @@ -281,7 +283,7 @@ class 
TestProcessApi(BaseTest): process_group_description = "Test Process Group" process_model_id = "sample" process_model_identifier = f"{process_group_id}/{process_model_id}" - self.create_process_group(client, with_super_admin_user, process_group_id, process_group_description) + self.create_process_group_with_api(client, with_super_admin_user, process_group_id, process_group_description) self.create_process_model_with_api( client, process_model_id=process_model_identifier, @@ -317,7 +319,7 @@ class TestProcessApi(BaseTest): bpmn_file_location = "sample" process_model_identifier = f"{test_process_group_id}/{test_process_model_id}" modified_process_model_identifier = process_model_identifier.replace("/", ":") - self.create_process_group(client, with_super_admin_user, test_process_group_id) + self.create_process_group_with_api(client, with_super_admin_user, test_process_group_id) self.create_process_model_with_api(client, process_model_identifier, user=with_super_admin_user) bpmn_file_data_bytes = self.get_test_data_file_contents(bpmn_file_name, bpmn_file_location) self.create_spec_file( @@ -362,7 +364,7 @@ class TestProcessApi(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_process_model_update.""" - self.create_process_group(client, with_super_admin_user, "test_process_group", "Test Process Group") + self.create_process_group_with_api(client, with_super_admin_user, "test_process_group", "Test Process Group") process_model_identifier = "test_process_group/make_cookies" self.create_process_model_with_api( client, @@ -403,7 +405,7 @@ class TestProcessApi(BaseTest): ) -> None: """Test_process_model_list_all.""" group_id = "test_group/test_sub_group" - self.create_process_group(client, with_super_admin_user, group_id) + self.create_process_group_with_api(client, with_super_admin_user, group_id) # add 5 models to the group for i in range(5): @@ -439,7 +441,7 @@ class TestProcessApi(BaseTest): """Test_process_model_list.""" # create a group group_id = "test_group" - self.create_process_group(client, with_super_admin_user, group_id) + self.create_process_group_with_api(client, with_super_admin_user, group_id) # add 5 models to the group for i in range(5): @@ -603,7 +605,7 @@ class TestProcessApi(BaseTest): process_group_id = "test" process_group_display_name = "My Process Group" - self.create_process_group( + self.create_process_group_with_api( client, with_super_admin_user, process_group_id, @@ -632,7 +634,7 @@ class TestProcessApi(BaseTest): group_id = "test_process_group" group_display_name = "Test Group" - self.create_process_group(client, with_super_admin_user, group_id, display_name=group_display_name) + self.create_process_group_with_api(client, with_super_admin_user, group_id, display_name=group_display_name) process_group = ProcessModelService.get_process_group(group_id) assert process_group.display_name == group_display_name @@ -662,7 +664,9 @@ class TestProcessApi(BaseTest): for i in range(5): group_id = f"test_process_group_{i}" group_display_name = f"Test Group {i}" - self.create_process_group(client, with_super_admin_user, group_id, display_name=group_display_name) + self.create_process_group_with_api( + client, with_super_admin_user, group_id, display_name=group_display_name + ) # get all groups response = client.get( @@ -787,7 +791,7 @@ class TestProcessApi(BaseTest): process_group_description = "Test Group" process_model_id = "random_fact" process_model_identifier = f"{process_group_id}/{process_model_id}" - self.create_process_group(client, with_super_admin_user, 
process_group_id, process_group_description) + self.create_process_group_with_api(client, with_super_admin_user, process_group_id, process_group_description) self.create_process_model_with_api( client, process_model_id=process_model_identifier, @@ -1091,7 +1095,7 @@ class TestProcessApi(BaseTest): ) -> None: """Test_get_process_model_when_not_found.""" process_model_dir_name = "THIS_NO_EXISTS" - group_id = self.create_process_group(client, with_super_admin_user, "my_group") + group_id = self.create_process_group_with_api(client, with_super_admin_user, "my_group") bad_process_model_id = f"{group_id}/{process_model_dir_name}" modified_bad_process_model_id = bad_process_model_id.replace("/", ":") response = client.get( @@ -2714,7 +2718,7 @@ class TestProcessApi(BaseTest): groups = ["group_a", "group_b", "group_b/group_bb"] # setup initial groups for group in groups: - self.create_process_group(client, with_super_admin_user, group, display_name=group) + self.create_process_group_with_api(client, with_super_admin_user, group, display_name=group) # make sure initial groups exist for group in groups: persisted = ProcessModelService.get_process_group(group) @@ -2783,7 +2787,7 @@ class TestProcessApi(BaseTest): sub_group_id = "sub_group" original_location = "group_a" original_sub_path = f"{original_location}/{sub_group_id}" - self.create_process_group(client, with_super_admin_user, original_sub_path, display_name=sub_group_id) + self.create_process_group_with_api(client, with_super_admin_user, original_sub_path, display_name=sub_group_id) # make sure original subgroup exists persisted = ProcessModelService.get_process_group(original_sub_path) assert persisted is not None @@ -2835,7 +2839,7 @@ class TestProcessApi(BaseTest): # ) # # process_group_id = "test_group" - # self.create_process_group( + # self.create_process_group_with_api( # client, with_super_admin_user, process_group_id, process_group_id # ) # @@ -3077,6 +3081,18 @@ class TestProcessApi(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_can_get_process_instance_list_with_report_metadata.""" + process_model = self.create_process_model_with_metadata() + process_instance = self.create_process_instance_from_process_model( + process_model=process_model, user=with_super_admin_user + ) + + processor = ProcessInstanceProcessor(process_instance) + processor.do_engine_steps(save=True) + process_instance_metadata = ProcessInstanceMetadataModel.query.filter_by( + process_instance_id=process_instance.id + ).all() + assert len(process_instance_metadata) == 2 + process_model = load_test_spec( process_model_id="save_process_instance_metadata/save_process_instance_metadata", bpmn_file_name="save_process_instance_metadata.bpmn", @@ -3115,11 +3131,35 @@ class TestProcessApi(BaseTest): "filterable": False, }, {"Header": "Status", "accessor": "status", "filterable": False}, + {"Header": "awesome_var", "accessor": "awesome_var", "filterable": True}, + {"Header": "invoice_number", "accessor": "invoice_number", "filterable": True}, {"Header": "key1", "accessor": "key1", "filterable": True}, {"Header": "key2", "accessor": "key2", "filterable": True}, {"Header": "key3", "accessor": "key3", "filterable": True}, ] + # pluck accessor from each dict in list + accessors = [column["accessor"] for column in response.json] + stock_columns = [ + "id", + "process_model_display_name", + "start_in_seconds", + "end_in_seconds", + "process_initiator_username", + "status", + ] + assert accessors == stock_columns + ["awesome_var", "invoice_number", "key1", "key2", 
"key3"] + + # expected columns are fewer if we filter by process_model_identifier + response = client.get( + "/v1.0/process-instances/reports/columns?process_model_identifier=save_process_instance_metadata/save_process_instance_metadata", + headers=self.logged_in_headers(with_super_admin_user), + ) + assert response.json is not None + assert response.status_code == 200 + accessors = [column["accessor"] for column in response.json] + assert accessors == stock_columns + ["key1", "key2", "key3"] + def test_process_instance_list_can_order_by_metadata( self, app: Flask, @@ -3128,7 +3168,7 @@ class TestProcessApi(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_process_instance_list_can_order_by_metadata.""" - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") process_model = load_test_spec( "test_group/hello_world", process_model_source_directory="nested-task-data-structure", diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_secret_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_secret_service.py index 3e19607d..e12a1dd5 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_secret_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_secret_service.py @@ -34,7 +34,7 @@ class SecretServiceTestHelpers(BaseTest): def add_test_process(self, client: FlaskClient, user: UserModel) -> ProcessModelInfo: """Add_test_process.""" - self.create_process_group( + self.create_process_group_with_api( client, user, self.test_process_group_id, diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_group_members.py b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_group_members.py index 3a128cff..685788c3 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_group_members.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_group_members.py @@ -38,7 +38,7 @@ class TestGetGroupMembers(BaseTest): UserService.add_user_to_group(testuser2, group_a) UserService.add_user_to_group(testuser3, group_b) - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") process_model = load_test_spec( process_model_id="test_group/get_group_members", bpmn_file_name="get_group_members.bpmn", diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_last_user_completing_task.py b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_last_user_completing_task.py index 5f0e40d3..fcd8b641 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_last_user_completing_task.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_last_user_completing_task.py @@ -23,7 +23,7 @@ class TestGetLastUserCompletingTask(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_sets_permission_correctly_on_human_task.""" - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") initiator_user = self.find_or_create_user("initiator_user") assert initiator_user.principal is not None AuthorizationService.import_permissions_from_yaml_file() diff --git 
a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_localtime.py b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_localtime.py index 31d2aa69..9595c948 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_localtime.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_localtime.py @@ -54,7 +54,7 @@ class TestGetLocaltime(BaseTest): target_uri="/v1.0/process-groups", permission_names=["read", "create"], ) - self.create_process_group(client=client, user=initiator_user, process_group_id="test_group") + self.create_process_group_with_api(client=client, user=initiator_user, process_group_id="test_group") process_model = load_test_spec( process_model_id="test_group/get_localtime", bpmn_file_name="get_localtime.bpmn", diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_process_initiator_user.py b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_process_initiator_user.py index 84ac7c27..60a93f9a 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_process_initiator_user.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_process_initiator_user.py @@ -23,7 +23,7 @@ class TestGetProcessInitiatorUser(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_sets_permission_correctly_on_human_task.""" - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") initiator_user = self.find_or_create_user("initiator_user") assert initiator_user.principal is not None AuthorizationService.import_permissions_from_yaml_file() diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_save_process_instance_metadata.py b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_save_process_instance_metadata.py index d0202a64..bf64b21d 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_save_process_instance_metadata.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_save_process_instance_metadata.py @@ -24,7 +24,7 @@ class TestSaveProcessInstanceMetadata(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_can_save_process_instance_metadata.""" - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") process_model = load_test_spec( process_model_id="save_process_instance_metadata/save_process_instance_metadata", bpmn_file_name="save_process_instance_metadata.bpmn", diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_message_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_message_service.py index 2d2f7baa..403c2323 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_message_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_message_service.py @@ -153,7 +153,7 @@ class TestMessageService(BaseTest): group_name: str = "test_group", ) -> None: process_group_id = group_name - self.create_process_group(client, with_super_admin_user, process_group_id, process_group_id) + self.create_process_group_with_api(client, with_super_admin_user, process_group_id, process_group_id) process_model = load_test_spec( "test_group/message", @@ -222,7 +222,7 @@ class TestMessageService(BaseTest): ) -> None: 
"""Test_can_send_message_to_multiple_process_models.""" process_group_id = "test_group_multi" - self.create_process_group(client, with_super_admin_user, process_group_id, process_group_id) + self.create_process_group_with_api(client, with_super_admin_user, process_group_id, process_group_id) process_model_sender = load_test_spec( "test_group/message_sender", diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_permissions.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_permissions.py index b81164c1..f229bdf7 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_permissions.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_permissions.py @@ -33,7 +33,7 @@ class TestPermissions(BaseTest): ) -> None: """Test_user_can_be_given_permission_to_administer_process_group.""" process_group_id = "group-a" - self.create_process_group(client, with_super_admin_user, process_group_id, process_group_id) + self.create_process_group_with_api(client, with_super_admin_user, process_group_id, process_group_id) load_test_spec( "group-a/timers_intermediate_catch_event", bpmn_file_name="timers_intermediate_catch_event.bpmn", diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index 9ccda1cb..37709197 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -72,7 +72,7 @@ class TestProcessInstanceProcessor(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_sets_permission_correctly_on_human_task.""" - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") initiator_user = self.find_or_create_user("initiator_user") finance_user = self.find_or_create_user("testuser2") assert initiator_user.principal is not None @@ -140,7 +140,7 @@ class TestProcessInstanceProcessor(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_sets_permission_correctly_on_human_task_when_using_dict.""" - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") initiator_user = self.find_or_create_user("initiator_user") finance_user_three = self.find_or_create_user("testuser3") finance_user_four = self.find_or_create_user("testuser4") @@ -264,7 +264,7 @@ class TestProcessInstanceProcessor(BaseTest): # with_db_and_bpmn_file_cleanup: None, # with_super_admin_user: UserModel, # ) -> None: - # self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + # self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") # initiator_user = self.find_or_create_user("initiator_user") # finance_user_three = self.find_or_create_user("testuser3") # assert initiator_user.principal is not None @@ -318,7 +318,7 @@ class TestProcessInstanceProcessor(BaseTest): with_db_and_bpmn_file_cleanup: None, with_super_admin_user: UserModel, ) -> None: - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") initiator_user = self.find_or_create_user("initiator_user") 
finance_user_three = self.find_or_create_user("testuser3") assert initiator_user.principal is not None @@ -472,7 +472,7 @@ class TestProcessInstanceProcessor(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_does_not_recreate_human_tasks_on_multiple_saves.""" - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") initiator_user = self.find_or_create_user("initiator_user") finance_user_three = self.find_or_create_user("testuser3") assert initiator_user.principal is not None diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_model.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_model.py index 4d8e1b5b..22f92111 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_model.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_model.py @@ -14,7 +14,6 @@ from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.services.process_instance_processor import ( ProcessInstanceProcessor, ) -from spiffworkflow_backend.services.process_model_service import ProcessModelService class TestProcessModel(BaseTest): @@ -33,7 +32,7 @@ class TestProcessModel(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_can_run_process_model_with_call_activities.""" - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") process_model = load_test_spec( "test_group/call_activity_test", # bpmn_file_name="call_activity_test.bpmn", @@ -53,7 +52,7 @@ class TestProcessModel(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_can_run_process_model_with_call_activities.""" - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") process_model = load_test_spec( "test_group/call_activity_nested", process_model_source_directory="call_activity_nested", @@ -84,7 +83,7 @@ class TestProcessModel(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_can_run_process_model_with_call_activities.""" - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") process_model = load_test_spec( "test_group/call_activity_nested", process_model_source_directory="call_activity_nested", @@ -120,20 +119,7 @@ class TestProcessModel(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_can_run_process_model_with_call_activities.""" - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") - process_model = load_test_spec( - "test_group/hello_world", - process_model_source_directory="nested-task-data-structure", - ) - ProcessModelService.update_process_model( - process_model, - { - "metadata_extraction_paths": [ - {"key": "awesome_var", "path": "outer.inner"}, - {"key": "invoice_number", "path": "invoice_number"}, - ] - }, - ) + process_model = self.create_process_model_with_metadata() process_instance = self.create_process_instance_from_process_model(process_model) processor = ProcessInstanceProcessor(process_instance) diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_model_service.py 
b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_model_service.py index 79d52888..0ff8bf46 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_model_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_model_service.py @@ -19,7 +19,7 @@ class TestProcessModelService(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_can_update_specified_attributes.""" - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") process_model = load_test_spec( "test_group/hello_world", bpmn_file_name="hello_world.bpmn", diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_restricted_script_engine.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_restricted_script_engine.py index e0b1535d..330d115f 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_restricted_script_engine.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_restricted_script_engine.py @@ -23,7 +23,7 @@ class TestOpenFile(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_form_data_conversion_to_dot_dict.""" - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") process_model = load_test_spec( "test_group/dangerous", bpmn_file_name="read_etc_passwd.bpmn", @@ -50,7 +50,7 @@ class TestImportModule(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_form_data_conversion_to_dot_dict.""" - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") process_model = load_test_spec( "test_group/dangerous", bpmn_file_name="read_env.bpmn", diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_script_unit_test_runner.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_script_unit_test_runner.py index 0fc3ee66..f5eef2e8 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_script_unit_test_runner.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_script_unit_test_runner.py @@ -26,7 +26,7 @@ class TestScriptUnitTestRunner(BaseTest): app.config["THREAD_LOCAL_DATA"].process_instance_id = None process_group_id = "test_logging_spiff_logger" - self.create_process_group(client, with_super_admin_user, process_group_id, process_group_id) + self.create_process_group_with_api(client, with_super_admin_user, process_group_id, process_group_id) process_model_id = "simple_script" process_model_identifier = f"{process_group_id}/{process_model_id}" load_test_spec( @@ -62,7 +62,7 @@ class TestScriptUnitTestRunner(BaseTest): app.config["THREAD_LOCAL_DATA"].process_instance_id = None process_group_id = "test_logging_spiff_logger" - self.create_process_group(client, with_super_admin_user, process_group_id, process_group_id) + self.create_process_group_with_api(client, with_super_admin_user, process_group_id, process_group_id) process_model_id = "simple_script" process_model_identifier = f"{process_group_id}/{process_model_id}" @@ -99,7 +99,7 @@ class TestScriptUnitTestRunner(BaseTest): app.config["THREAD_LOCAL_DATA"].process_instance_id = None process_group_id = "script_with_unit_tests" - self.create_process_group(client, with_super_admin_user, 
process_group_id, process_group_id) + self.create_process_group_with_api(client, with_super_admin_user, process_group_id, process_group_id) process_model_id = "script_with_unit_tests" process_model_identifier = f"{process_group_id}/{process_model_id}" @@ -132,7 +132,7 @@ class TestScriptUnitTestRunner(BaseTest): app.config["THREAD_LOCAL_DATA"].process_instance_id = None process_group_id = "script_with_unit_tests" - self.create_process_group(client, with_super_admin_user, process_group_id, process_group_id) + self.create_process_group_with_api(client, with_super_admin_user, process_group_id, process_group_id) process_model_id = "script_with_unit_tests" process_model_identifier = f"{process_group_id}/{process_model_id}" From 9e91ff8f912a60b8ddf5c82302afad10730df7fd Mon Sep 17 00:00:00 2001 From: jasquat Date: Mon, 27 Mar 2023 10:37:31 -0400 Subject: [PATCH 101/162] process children and tasks of parent subprocesses instead of looking for all tasks with a given state w/ burnettk --- .../services/process_instance_processor.py | 208 +++++++++--------- .../services/task_service.py | 4 +- .../services/workflow_execution_service.py | 28 ++- .../src/routes/ProcessInstanceShow.tsx | 33 +-- 4 files changed, 149 insertions(+), 124 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 722baa0d..99f60a2c 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -1,5 +1,7 @@ """Process_instance_processor.""" import _strptime # type: ignore +from sqlalchemy import or_ +from sqlalchemy import and_ import decimal import json import logging @@ -1263,109 +1265,109 @@ class ProcessInstanceProcessor: cls, process_instance: ProcessInstanceModel, to_task_guid: str, commit: Optional[bool] = False ) -> None: """Reset a process to an earlier state.""" - raise Exception("This feature to reset a process instance to a given task is currently unavaiable") - # cls.add_event_to_process_instance( - # process_instance, ProcessInstanceEventType.process_instance_rewound_to_task.value, task_guid=to_task_guid - # ) - # - # to_task_model = TaskModel.query.filter_by(guid=to_task_guid, process_instance_id=process_instance.id).first() - # if to_task_model is None: - # raise TaskNotFoundError( - # f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'" - # ) - # - # parent_bpmn_processes, task_models_of_parent_bpmn_processes = TaskService.task_models_of_parent_bpmn_processes( - # to_task_model - # ) - # [p.guid for p in task_models_of_parent_bpmn_processes if p.guid] - # [p.id for p in parent_bpmn_processes] - # tasks_to_update_query = db.session.query(TaskModel).filter( - # and_( - # or_( - # TaskModel.end_in_seconds > to_task_model.end_in_seconds, - # TaskModel.end_in_seconds.is_(None), # type: ignore - # ), - # TaskModel.process_instance_id == process_instance.id, - # # TaskModel.bpmn_process_id.in_(parent_bpmn_processes_ids), # type: ignore - # ) - # ) - # tasks_to_update = tasks_to_update_query.all() - # - # # run all queries before making changes to task_model - # if commit: - # # tasks_to_delete_query = db.session.query(TaskModel).filter( - # # and_( - # # or_( - # # TaskModel.end_in_seconds > to_task_model.end_in_seconds, - # # TaskModel.end_in_seconds.is_not(None), # type: ignore - # # ), - # # 
TaskModel.process_instance_id == process_instance.id, - # # TaskModel.guid.not_in(task_models_of_parent_bpmn_processes_guids), # type: ignore - # # TaskModel.bpmn_process_id.not_in(parent_bpmn_processes_ids), # type: ignore - # # ) - # # ) - # # - # # tasks_to_delete = tasks_to_delete_query.all() - # # - # # # delete any later tasks from to_task_model and delete bpmn processes that may be - # # # link directly to one of those tasks. - # # tasks_to_delete_guids = [t.guid for t in tasks_to_delete] - # # tasks_to_delete_ids = [t.id for t in tasks_to_delete] - # # bpmn_processes_to_delete = BpmnProcessModel.query.filter( - # # BpmnProcessModel.guid.in_(tasks_to_delete_guids) # type: ignore - # # ).order_by(BpmnProcessModel.id.desc()).all() - # # human_tasks_to_delete = HumanTaskModel.query.filter( - # # HumanTaskModel.task_model_id.in_(tasks_to_delete_ids) # type: ignore - # # ).all() - # # - # # - # # import pdb; pdb.set_trace() - # # # ensure the correct order for foreign keys - # # for human_task_to_delete in human_tasks_to_delete: - # # db.session.delete(human_task_to_delete) - # # db.session.commit() - # # for task_to_delete in tasks_to_delete: - # # db.session.delete(task_to_delete) - # # db.session.commit() - # # for bpmn_process_to_delete in bpmn_processes_to_delete: - # # db.session.delete(bpmn_process_to_delete) - # # db.session.commit() - # - # related_human_task = HumanTaskModel.query.filter_by(task_model_id=to_task_model.id).first() - # if related_human_task is not None: - # db.session.delete(related_human_task) - # - # tasks_to_update_ids = [t.id for t in tasks_to_update] - # human_tasks_to_delete = HumanTaskModel.query.filter( - # HumanTaskModel.task_model_id.in_(tasks_to_update_ids) # type: ignore - # ).all() - # for human_task_to_delete in human_tasks_to_delete: - # db.session.delete(human_task_to_delete) - # db.session.commit() - # - # for task_to_update in tasks_to_update: - # TaskService.reset_task_model(task_to_update, state="FUTURE", commit=commit) - # - # parent_task_model = TaskModel.query.filter_by(guid=to_task_model.properties_json["parent"]).first() - # if parent_task_model is None: - # raise TaskNotFoundError( - # f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'" - # ) - # - # TaskService.reset_task_model( - # to_task_model, - # state="READY", - # json_data_hash=parent_task_model.json_data_hash, - # python_env_data_hash=parent_task_model.python_env_data_hash, - # commit=commit, - # ) - # for task_model in task_models_of_parent_bpmn_processes: - # TaskService.reset_task_model(task_model, state="WAITING", commit=commit) - # - # if commit: - # processor = ProcessInstanceProcessor(process_instance) - # processor.save() - # processor.suspend() + # raise Exception("This feature to reset a process instance to a given task is currently unavaiable") + cls.add_event_to_process_instance( + process_instance, ProcessInstanceEventType.process_instance_rewound_to_task.value, task_guid=to_task_guid + ) + + to_task_model = TaskModel.query.filter_by(guid=to_task_guid, process_instance_id=process_instance.id).first() + if to_task_model is None: + raise TaskNotFoundError( + f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'" + ) + + parent_bpmn_processes, task_models_of_parent_bpmn_processes = TaskService.task_models_of_parent_bpmn_processes( + to_task_model + ) + [p.guid for p in task_models_of_parent_bpmn_processes if p.guid] + [p.id for p in parent_bpmn_processes] + tasks_to_update_query = 
db.session.query(TaskModel).filter( + and_( + or_( + TaskModel.end_in_seconds > to_task_model.end_in_seconds, + TaskModel.end_in_seconds.is_(None), # type: ignore + ), + TaskModel.process_instance_id == process_instance.id, + # TaskModel.bpmn_process_id.in_(parent_bpmn_processes_ids), # type: ignore + ) + ) + tasks_to_update = tasks_to_update_query.all() + + # run all queries before making changes to task_model + if commit: + # tasks_to_delete_query = db.session.query(TaskModel).filter( + # and_( + # or_( + # TaskModel.end_in_seconds > to_task_model.end_in_seconds, + # TaskModel.end_in_seconds.is_not(None), # type: ignore + # ), + # TaskModel.process_instance_id == process_instance.id, + # TaskModel.guid.not_in(task_models_of_parent_bpmn_processes_guids), # type: ignore + # TaskModel.bpmn_process_id.not_in(parent_bpmn_processes_ids), # type: ignore + # ) + # ) + # + # tasks_to_delete = tasks_to_delete_query.all() + # + # # delete any later tasks from to_task_model and delete bpmn processes that may be + # # link directly to one of those tasks. + # tasks_to_delete_guids = [t.guid for t in tasks_to_delete] + # tasks_to_delete_ids = [t.id for t in tasks_to_delete] + # bpmn_processes_to_delete = BpmnProcessModel.query.filter( + # BpmnProcessModel.guid.in_(tasks_to_delete_guids) # type: ignore + # ).order_by(BpmnProcessModel.id.desc()).all() + # human_tasks_to_delete = HumanTaskModel.query.filter( + # HumanTaskModel.task_model_id.in_(tasks_to_delete_ids) # type: ignore + # ).all() + # + # + # import pdb; pdb.set_trace() + # # ensure the correct order for foreign keys + # for human_task_to_delete in human_tasks_to_delete: + # db.session.delete(human_task_to_delete) + # db.session.commit() + # for task_to_delete in tasks_to_delete: + # db.session.delete(task_to_delete) + # db.session.commit() + # for bpmn_process_to_delete in bpmn_processes_to_delete: + # db.session.delete(bpmn_process_to_delete) + # db.session.commit() + + related_human_task = HumanTaskModel.query.filter_by(task_model_id=to_task_model.id).first() + if related_human_task is not None: + db.session.delete(related_human_task) + + tasks_to_update_ids = [t.id for t in tasks_to_update] + human_tasks_to_delete = HumanTaskModel.query.filter( + HumanTaskModel.task_model_id.in_(tasks_to_update_ids) # type: ignore + ).all() + for human_task_to_delete in human_tasks_to_delete: + db.session.delete(human_task_to_delete) + db.session.commit() + + for task_to_update in tasks_to_update: + TaskService.reset_task_model(task_to_update, state="FUTURE", commit=commit) + + parent_task_model = TaskModel.query.filter_by(guid=to_task_model.properties_json["parent"]).first() + if parent_task_model is None: + raise TaskNotFoundError( + f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'" + ) + + TaskService.reset_task_model( + to_task_model, + state="READY", + json_data_hash=parent_task_model.json_data_hash, + python_env_data_hash=parent_task_model.python_env_data_hash, + commit=commit, + ) + for task_model in task_models_of_parent_bpmn_processes: + TaskService.reset_task_model(task_model, state="WAITING", commit=commit) + + if commit: + processor = ProcessInstanceProcessor(process_instance) + processor.save() + processor.suspend() @staticmethod def get_parser() -> MyCustomParser: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index d3cf545c..4b86eefc 100644 --- 
a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -110,9 +110,9 @@ class TaskService: for sp_id, sp in top_level_workflow.subprocesses.items(): if sp == my_wf: my_sp = sp - my_sp_id = sp_id + my_sp_id = str(sp_id) break - return (str(my_sp_id), my_sp) + return (my_sp_id, my_sp) @classmethod def task_bpmn_process( diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py index 4d933418..99ef4ee6 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py @@ -1,5 +1,6 @@ import logging import time +from uuid import UUID from typing import Callable from typing import Optional @@ -67,6 +68,7 @@ class TaskModelSavingDelegate(EngineStepDelegate): self.task_models: dict[str, TaskModel] = {} self.json_data_dicts: dict[str, JsonDataDict] = {} self.process_instance_events: dict[str, ProcessInstanceEventModel] = {} + self.last_completed_spiff_task: Optional[SpiffTask] = None def will_complete_task(self, spiff_task: SpiffTask) -> None: if self._should_update_task_model(): @@ -81,6 +83,7 @@ class TaskModelSavingDelegate(EngineStepDelegate): raise Exception("Could not find cached current_task_start_in_seconds. This should never have happend") task_model.start_in_seconds = self.current_task_start_in_seconds task_model.end_in_seconds = time.time() + self.last_completed_spiff_task= spiff_task if self.secondary_engine_step_delegate: self.secondary_engine_step_delegate.did_complete_task(spiff_task) @@ -104,10 +107,27 @@ class TaskModelSavingDelegate(EngineStepDelegate): # TODO: also include children of the last task processed. This may help with task resets # if we have to set their states to FUTURE. # excludes FUTURE and COMPLETED. the others were required to get PP1 to go to completion. 
- for waiting_spiff_task in bpmn_process_instance.get_tasks( - TaskState.WAITING | TaskState.CANCELLED | TaskState.READY | TaskState.MAYBE | TaskState.LIKELY - ): - self._update_task_model_with_spiff_task(waiting_spiff_task) + # for waiting_spiff_task in bpmn_process_instance.get_tasks( + # TaskState.WAITING | TaskState.CANCELLED | TaskState.READY | TaskState.MAYBE | TaskState.LIKELY + # ): + # self._update_task_model_with_spiff_task(waiting_spiff_task) + if self.last_completed_spiff_task is not None: + self._process_spiff_task_children(self.last_completed_spiff_task) + self._process_spiff_task_parents(self.last_completed_spiff_task) + + def _process_spiff_task_children(self, spiff_task: SpiffTask) -> None: + for child_spiff_task in spiff_task.children: + self._update_task_model_with_spiff_task(child_spiff_task) + self._process_spiff_task_children(child_spiff_task) + + def _process_spiff_task_parents(self, spiff_task: SpiffTask) -> None: + (parent_subprocess_guid, _parent_subprocess) = TaskService.task_subprocess(spiff_task) + if parent_subprocess_guid is not None: + spiff_task_of_parent_subprocess = spiff_task.workflow._get_outermost_workflow().get_task(UUID(parent_subprocess_guid)) + + if spiff_task_of_parent_subprocess is not None: + self._update_task_model_with_spiff_task(spiff_task_of_parent_subprocess) + self._process_spiff_task_parents(spiff_task_of_parent_subprocess) def _should_update_task_model(self) -> bool: """We need to figure out if we have previously save task info on this process intance. diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx index feaa4173..eaf90955 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx @@ -260,7 +260,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { return !taskToTimeTravelTo; }; - const completionViewLink = (label: any, taskGuid: string) => { + const queryParams = () => { const processIdentifier = searchParams.get('process_identifier'); const callActivityTaskId = searchParams.get('bpmn_process_guid'); const queryParamArray = []; @@ -270,16 +270,19 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { if (callActivityTaskId) { queryParamArray.push(`bpmn_process_guid=${callActivityTaskId}`); } - let queryParams = ''; + let queryParamString = ''; if (queryParamArray.length > 0) { - queryParams = `?${queryParamArray.join('&')}`; + queryParamString = `?${queryParamArray.join('&')}`; } + return queryParamString; + }; + const completionViewLink = (label: any, taskGuid: string) => { return ( {label} @@ -287,7 +290,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { }; const returnToProcessInstance = () => { - window.location.href = processInstanceShowPageBaseUrl; + window.location.href = `${processInstanceShowPageBaseUrl}${queryParams()}`; }; const resetProcessInstance = () => { @@ -671,16 +674,16 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { ); }; - const canResetProcess = (_task: Task) => { - // disabling this feature for now - return false; - // return ( - // ability.can('POST', targetUris.processInstanceResetPath) && - // processInstance && - // processInstance.status === 'suspended' && - // task.state === 'READY' && - // !showingActiveTask() - // ); + const canResetProcess = (task: Task) => { + // // disabling this feature for now + // return false; + return ( + ability.can('POST', 
targetUris.processInstanceResetPath) && + processInstance && + processInstance.status === 'suspended' && + task.state === 'READY' && + !showingActiveTask() + ); }; const getEvents = (task: Task) => { From 02cfad91aff6a61002957d53db66e6c6ab8c6a30 Mon Sep 17 00:00:00 2001 From: burnettk Date: Mon, 27 Mar 2023 11:22:53 -0400 Subject: [PATCH 102/162] user cleanup --- .../realm_exports/spiffworkflow-realm.json | 40 ------------------- .../keycloak/test_user_lists/sartography | 2 - 2 files changed, 42 deletions(-) diff --git a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json index c7781b81..27239bca 100644 --- a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json +++ b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json @@ -2352,26 +2352,6 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] - }, { - "id" : "058b60f8-799e-48b0-a2b7-2e65e7a35724", - "createdTimestamp" : 1675718484672, - "username" : "mike", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "mike@sartography.com", - "credentials" : [ { - "id" : "669f5421-843d-411d-9f24-1be41e545e52", - "type" : "password", - "createdDate" : 1675718484715, - "secretData" : "{\"value\":\"YILRiRdrsy8CA716ZQazpQOf7mpiXGaYnR26ra3pSjmHkZS9tsePTRwU2OIGPwbN1LKJcIzrpfEP7cVW2Lm17w==\",\"salt\":\"7mfD1X7Hns/5pPgHb9uZ1Q==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] }, { "id" : "97843876-e1b6-469a-bab4-f9bce4aa5936", "createdTimestamp" : 1678461819014, @@ -2395,26 +2375,6 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] - }, { - "id" : "9d23748e-23a7-4c48-956c-64da75871277", - "createdTimestamp" : 1675718484779, - "username" : "natalia", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "natalia@sartography.com", - "credentials" : [ { - "id" : "476024e5-62e4-48b6-afbb-cc2834fae4c7", - "type" : "password", - "createdDate" : 1675718484823, - "secretData" : "{\"value\":\"FfrpgES+XI2w4NRe1aBmolPFcERbEUDXZcFtUWucrbhBspQLYNaN2VLmeDRV0VcT47Bn8dqjU11ct64WDtffWA==\",\"salt\":\"7rZd3fqY54i1eoNyXCcZ1w==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] }, { "id" : "7f34beba-e1e1-458a-8d23-eb07d6e3800c", "createdTimestamp" : 1678126023154, diff --git a/spiffworkflow-backend/keycloak/test_user_lists/sartography b/spiffworkflow-backend/keycloak/test_user_lists/sartography index 1e280bae..391a41e3 100644 --- a/spiffworkflow-backend/keycloak/test_user_lists/sartography +++ b/spiffworkflow-backend/keycloak/test_user_lists/sartography @@ -11,5 +11,3 @@ kb@sartography.com kevin@sartography.com madhurya@sartography.com,160 madhurya@ymail.com,161 -mike@sartography.com -natalia@sartography.com From f0a12b98189b1a1079cdfa659cca5cce70a04665 Mon Sep 17 00:00:00 2001 From: jasquat Date: Mon, 27 Mar 2023 11:25:55 -0400 Subject: [PATCH 103/162] allow searching multiple words when searching for process models w/ burnettk --- 
.../src/components/ProcessModelSearch.tsx | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/spiffworkflow-frontend/src/components/ProcessModelSearch.tsx b/spiffworkflow-frontend/src/components/ProcessModelSearch.tsx index b7debc6b..21847bbf 100644 --- a/spiffworkflow-frontend/src/components/ProcessModelSearch.tsx +++ b/spiffworkflow-frontend/src/components/ProcessModelSearch.tsx @@ -36,10 +36,17 @@ export default function ProcessModelSearch({ const shouldFilterProcessModel = (options: any) => { const processModel: ProcessModel = options.item; - const { inputValue } = options; - return getFullProcessModelLabel(processModel) - .toLowerCase() - .includes((inputValue || '').toLowerCase()); + let { inputValue } = options; + if (!inputValue) { + inputValue = ''; + } + const inputValueArray = inputValue.split(' '); + const processModelLowerCase = + getFullProcessModelLabel(processModel).toLowerCase(); + + return inputValueArray.every((i: any) => { + return processModelLowerCase.includes((i || '').toLowerCase()); + }); }; return ( Date: Mon, 27 Mar 2023 14:09:08 -0400 Subject: [PATCH 104/162] do not allow uploading a user to keycloak that matches admin user that the script is using w/ burnettk --- .../keycloak/bin/add_test_users_to_keycloak | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/spiffworkflow-backend/keycloak/bin/add_test_users_to_keycloak b/spiffworkflow-backend/keycloak/bin/add_test_users_to_keycloak index 4196add0..c53fe438 100755 --- a/spiffworkflow-backend/keycloak/bin/add_test_users_to_keycloak +++ b/spiffworkflow-backend/keycloak/bin/add_test_users_to_keycloak @@ -89,6 +89,12 @@ while read -r input_line; do echo "Importing: $input_line" user_email=$(awk -F ',' '{print $1}' <<<"$input_line") username=$(awk -F '@' '{print $1}' <<<"$user_email") + + if [[ "$username" == "$ADMIN_USERNAME" || "$user_email" == "$ADMIN_USERNAME" ]]; then + >&2 echo "ERROR: The user used as the admin user matches a user in the current import list. This should not happen. 
Comment out that user from the list or use a different admin user: ${ADMIN_USERNAME}" + exit 1 + fi + user_attribute_one=$(awk -F ',' '{print $2}' <<<"$input_line") http_code=$(add_user "$user_email" "$username" "$user_attribute_one") From e4fb9ad2f1921d6c0a76fd56b64c0fa0ca89d937 Mon Sep 17 00:00:00 2001 From: jasquat Date: Mon, 27 Mar 2023 15:42:26 -0400 Subject: [PATCH 105/162] moved common admin users to admin user script w/ burnettk --- spiffworkflow-backend/keycloak/test_user_lists/admin | 2 ++ spiffworkflow-backend/keycloak/test_user_lists/sartography | 3 --- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/spiffworkflow-backend/keycloak/test_user_lists/admin b/spiffworkflow-backend/keycloak/test_user_lists/admin index aa676cd9..a764901c 100644 --- a/spiffworkflow-backend/keycloak/test_user_lists/admin +++ b/spiffworkflow-backend/keycloak/test_user_lists/admin @@ -1,2 +1,4 @@ email,spiffworkflow-employeeid admin@spiffworkflow.org +jason@sartography.com +kevin@sartography.com diff --git a/spiffworkflow-backend/keycloak/test_user_lists/sartography b/spiffworkflow-backend/keycloak/test_user_lists/sartography index 391a41e3..17c5e688 100644 --- a/spiffworkflow-backend/keycloak/test_user_lists/sartography +++ b/spiffworkflow-backend/keycloak/test_user_lists/sartography @@ -1,13 +1,10 @@ email,spiffworkflow-employeeid -admin@spiffworkflow.org alex@sartography.com,111 dan@sartography.com,115 daniel@sartography.com elizabeth@sartography.com j@sartography.com -jason@sartography.com jon@sartography.com kb@sartography.com -kevin@sartography.com madhurya@sartography.com,160 madhurya@ymail.com,161 From a98667886964abedf78cf4be00a26700669489c4 Mon Sep 17 00:00:00 2001 From: jbirddog <100367399+jbirddog@users.noreply.github.com> Date: Mon, 27 Mar 2023 16:03:22 -0400 Subject: [PATCH 106/162] Handle the multiple single file upload widget case (#195) --- .../services/process_instance_service.py | 9 ++++++- .../unit/test_process_instance_service.py | 26 ++++++++++++++++++- 2 files changed, 33 insertions(+), 2 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py index ed2ea918..37f77ac1 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py @@ -278,6 +278,9 @@ class ProcessInstanceService: for list_index, list_value in enumerate(value): if isinstance(list_value, str): yield (identifier, list_value, list_index) + if isinstance(list_value, dict) and len(list_value) == 1: + for v in list_value.values(): + yield (identifier, v, list_index) @classmethod def file_data_models_for_data( @@ -308,7 +311,11 @@ class ProcessInstanceService: if model.list_index is None: data[model.identifier] = digest_reference else: - data[model.identifier][model.list_index] = digest_reference + old_value = data[model.identifier][model.list_index] + new_value: Any = digest_reference + if isinstance(old_value, dict) and len(old_value) == 1: + new_value = {k: digest_reference for k in old_value.keys()} + data[model.identifier][model.list_index] = new_value @classmethod def save_file_data_and_replace_with_digest_references( diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_service.py index 436810cc..0c27a538 100644 --- 
a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_service.py @@ -89,7 +89,7 @@ class TestProcessInstanceService(BaseTest): self._check_sample_file_data_model("uploaded_files", 0, models[0]) self._check_sample_file_data_model("uploaded_files", 1, models[1]) - def test_can_create_file_data_models_for_fix_of_file_data_and_non_file_data_values( + def test_can_create_file_data_models_for_mix_of_file_data_and_non_file_data_values( self, app: Flask, with_db_and_bpmn_file_cleanup: None, @@ -122,6 +122,8 @@ class TestProcessInstanceService(BaseTest): ) -> None: data = { "not_a_file": "just a value", + "also_no_files": ["not a file", "also not a file"], + "still_no_files": [{"key": "value"}], } models = ProcessInstanceService.file_data_models_for_data(data, 111) @@ -189,3 +191,25 @@ class TestProcessInstanceService(BaseTest): ], "not_a_file3": "just a value3", } + + def test_can_create_file_data_models_for_mulitple_single_file_data_values( + self, + app: Flask, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + data = { + "File": [ + { + "supporting_files": self.SAMPLE_FILE_DATA, + }, + { + "supporting_files": self.SAMPLE_FILE_DATA, + }, + ], + } + models = ProcessInstanceService.file_data_models_for_data(data, 111) + + assert len(models) == 2 + self._check_sample_file_data_model("File", 0, models[0]) + self._check_sample_file_data_model("File", 1, models[1]) From d194ed2676eaa0cf18d30db7280104763440c67d Mon Sep 17 00:00:00 2001 From: Elizabeth Esswein Date: Mon, 27 Mar 2023 16:13:17 -0400 Subject: [PATCH 107/162] import parser from spiff package --- .../services/process_instance_processor.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 722baa0d..27ecf1b7 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -42,16 +42,14 @@ from SpiffWorkflow.bpmn.specs.events.EndEvent import EndEvent # type: ignore from SpiffWorkflow.bpmn.specs.events.StartEvent import StartEvent # type: ignore from SpiffWorkflow.bpmn.specs.SubWorkflowTask import SubWorkflowTask # type: ignore from SpiffWorkflow.bpmn.workflow import BpmnWorkflow # type: ignore -from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser # type: ignore -from SpiffWorkflow.dmn.serializer.task_spec import BusinessRuleTaskConverter # type: ignore from SpiffWorkflow.exceptions import WorkflowException # type: ignore from SpiffWorkflow.exceptions import WorkflowTaskException from SpiffWorkflow.serializer.exceptions import MissingSpecError # type: ignore +from SpiffWorkflow.spiff.parser.process import SpiffBpmnParser # type: ignore from SpiffWorkflow.spiff.serializer.config import SPIFF_SPEC_CONFIG # type: ignore from SpiffWorkflow.task import Task as SpiffTask # type: ignore from SpiffWorkflow.task import TaskState from SpiffWorkflow.util.deep_merge import DeepMerge # type: ignore - from spiffworkflow_backend.exceptions.api_error import ApiError from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel from spiffworkflow_backend.models.bpmn_process_definition import ( @@ -107,8 +105,6 @@ from 
spiffworkflow_backend.services.workflow_execution_service import ( WorkflowExecutionService, ) -SPIFF_SPEC_CONFIG["task_specs"].append(BusinessRuleTaskConverter) - # Sorry about all this crap. I wanted to move this thing to another file, but # importing a bunch of types causes circular imports. @@ -1423,7 +1419,7 @@ class ProcessInstanceProcessor: @staticmethod def update_spiff_parser_with_all_process_dependency_files( - parser: BpmnDmnParser, + parser: SpiffBpmnParser, processed_identifiers: Optional[set[str]] = None, ) -> None: """Update_spiff_parser_with_all_process_dependency_files.""" From d9370f6608ac7bbb89809665132dfb28157091b3 Mon Sep 17 00:00:00 2001 From: burnettk Date: Mon, 27 Mar 2023 17:02:29 -0400 Subject: [PATCH 108/162] tests have been broken in ci, and this should fix it --- spiffworkflow-backend/poetry.lock | 2 +- .../src/spiffworkflow_backend/routes/tasks_controller.py | 2 +- .../services/process_instance_processor.py | 2 +- .../src/spiffworkflow_backend/services/task_service.py | 2 +- .../integration/test_logging_service.py | 2 +- .../unit/test_process_instance_processor.py | 6 +++--- 6 files changed, 8 insertions(+), 8 deletions(-) diff --git a/spiffworkflow-backend/poetry.lock b/spiffworkflow-backend/poetry.lock index a8d70db3..b96b8d78 100644 --- a/spiffworkflow-backend/poetry.lock +++ b/spiffworkflow-backend/poetry.lock @@ -1895,7 +1895,7 @@ lxml = "*" type = "git" url = "https://github.com/sartography/SpiffWorkflow" reference = "main" -resolved_reference = "f162aac43af3af18d1a55186aeccea154fb8b05d" +resolved_reference = "3c3345c85dd7f3b7112ad04aaa6487abbd2e9414" [[package]] name = "SQLAlchemy" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py index 50a4402a..baebd04c 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py @@ -636,7 +636,7 @@ def _get_spiff_task_from_process_instance( if processor is None: processor = ProcessInstanceProcessor(process_instance) task_uuid = uuid.UUID(task_guid) - spiff_task = processor.bpmn_process_instance.get_task(task_uuid) + spiff_task = processor.bpmn_process_instance.get_task_from_id(task_uuid) if spiff_task is None: raise ( diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 722baa0d..7e3028fd 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -1173,7 +1173,7 @@ class ProcessInstanceProcessor: """Mark the task complete optionally executing it.""" spiff_tasks_updated = {} start_in_seconds = time.time() - spiff_task = self.bpmn_process_instance.get_task(UUID(task_id)) + spiff_task = self.bpmn_process_instance.get_task_from_id(UUID(task_id)) event_type = ProcessInstanceEventType.task_skipped.value if execute: current_app.logger.info( diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index d3cf545c..90cd2919 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -255,7 +255,7 @@ class TaskService: 
task_data_dict = task_properties.pop("data") state_int = task_properties["state"] - spiff_task = spiff_workflow.get_task(UUID(task_id)) + spiff_task = spiff_workflow.get_task_from_id(UUID(task_id)) task_model = TaskModel.query.filter_by(guid=task_id).first() if task_model is None: diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py index 41f30563..7890e156 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py @@ -114,7 +114,7 @@ class TestLoggingService(BaseTest): process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() processor = ProcessInstanceProcessor(process_instance) human_task_one = process_instance.active_human_tasks[0] - spiff_manual_task = processor.bpmn_process_instance.get_task(UUID(human_task_one.task_id)) + spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) headers = self.logged_in_headers(with_super_admin_user) diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index 37709197..4cacfe80 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -305,10 +305,10 @@ class TestProcessInstanceProcessor(BaseTest): # process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() # processor = ProcessInstanceProcessor(process_instance) # human_task_one = process_instance.active_human_tasks[0] - # spiff_manual_task = processor.bpmn_process_instance.get_task(UUID(human_task_one.task_id)) + # spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) # ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) # human_task_one = process_instance.active_human_tasks[0] - # spiff_manual_task = processor.bpmn_process_instance.get_task(UUID(human_task_one.task_id)) + # spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) # ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) def test_properly_saves_tasks_when_running( @@ -356,7 +356,7 @@ class TestProcessInstanceProcessor(BaseTest): process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() processor = ProcessInstanceProcessor(process_instance) human_task_one = process_instance.active_human_tasks[0] - spiff_manual_task = processor.bpmn_process_instance.get_task(UUID(human_task_one.task_id)) + spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) # recreate variables to ensure all bpmn json was recreated from scratch from the db From 422aaf09daa76028f8cf10120bd8fa5c7fa3c819 Mon Sep 17 00:00:00 2001 From: burnettk Date: Mon, 27 Mar 2023 20:22:12 -0400 Subject: [PATCH 109/162] task_guid is a str, thanks, typeguard --- 
.../src/spiffworkflow_backend/routes/tasks_controller.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py index baebd04c..3d0eac40 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py @@ -175,7 +175,7 @@ def task_list_for_my_groups( def task_data_show( modified_process_model_identifier: str, process_instance_id: int, - task_guid: int = 0, + task_guid: str, ) -> flask.wrappers.Response: task_model = TaskModel.query.filter_by(guid=task_guid, process_instance_id=process_instance_id).first() if task_model is None: From 54b1a43494c1ebdcf9202a527034838cbebd4fc1 Mon Sep 17 00:00:00 2001 From: burnettk Date: Tue, 28 Mar 2023 08:04:42 -0400 Subject: [PATCH 110/162] lint --- .../services/process_instance_processor.py | 4 ++-- .../services/workflow_execution_service.py | 8 +++++--- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 54ee33b2..76b3d9ac 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -1,7 +1,5 @@ """Process_instance_processor.""" import _strptime # type: ignore -from sqlalchemy import or_ -from sqlalchemy import and_ import decimal import json import logging @@ -53,6 +51,8 @@ from SpiffWorkflow.spiff.serializer.config import SPIFF_SPEC_CONFIG # type: ign from SpiffWorkflow.task import Task as SpiffTask # type: ignore from SpiffWorkflow.task import TaskState from SpiffWorkflow.util.deep_merge import DeepMerge # type: ignore +from sqlalchemy import and_ +from sqlalchemy import or_ from spiffworkflow_backend.exceptions.api_error import ApiError from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py index 99ef4ee6..ac0f1a41 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py @@ -1,8 +1,8 @@ import logging import time -from uuid import UUID from typing import Callable from typing import Optional +from uuid import UUID from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer # type: ignore from SpiffWorkflow.bpmn.workflow import BpmnWorkflow # type: ignore @@ -83,7 +83,7 @@ class TaskModelSavingDelegate(EngineStepDelegate): raise Exception("Could not find cached current_task_start_in_seconds. 
This should never have happend") task_model.start_in_seconds = self.current_task_start_in_seconds task_model.end_in_seconds = time.time() - self.last_completed_spiff_task= spiff_task + self.last_completed_spiff_task = spiff_task if self.secondary_engine_step_delegate: self.secondary_engine_step_delegate.did_complete_task(spiff_task) @@ -123,7 +123,9 @@ class TaskModelSavingDelegate(EngineStepDelegate): def _process_spiff_task_parents(self, spiff_task: SpiffTask) -> None: (parent_subprocess_guid, _parent_subprocess) = TaskService.task_subprocess(spiff_task) if parent_subprocess_guid is not None: - spiff_task_of_parent_subprocess = spiff_task.workflow._get_outermost_workflow().get_task(UUID(parent_subprocess_guid)) + spiff_task_of_parent_subprocess = spiff_task.workflow._get_outermost_workflow().get_task( + UUID(parent_subprocess_guid) + ) if spiff_task_of_parent_subprocess is not None: self._update_task_model_with_spiff_task(spiff_task_of_parent_subprocess) From 796742d530375e5e4db974f3c7da220126e0f489 Mon Sep 17 00:00:00 2001 From: burnettk Date: Tue, 28 Mar 2023 08:07:54 -0400 Subject: [PATCH 111/162] remove second, conflicting madhurya account --- spiffworkflow-backend/keycloak/test_user_lists/sartography | 1 - 1 file changed, 1 deletion(-) diff --git a/spiffworkflow-backend/keycloak/test_user_lists/sartography b/spiffworkflow-backend/keycloak/test_user_lists/sartography index 17c5e688..d5d5c7bf 100644 --- a/spiffworkflow-backend/keycloak/test_user_lists/sartography +++ b/spiffworkflow-backend/keycloak/test_user_lists/sartography @@ -7,4 +7,3 @@ j@sartography.com jon@sartography.com kb@sartography.com madhurya@sartography.com,160 -madhurya@ymail.com,161 From 4a9f4b8cb0524d81438c2fde6bb24e4c17aab4da Mon Sep 17 00:00:00 2001 From: burnettk Date: Tue, 28 Mar 2023 08:22:17 -0400 Subject: [PATCH 112/162] do sonar for backend, too --- .github/workflows/backend_tests.yml | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/.github/workflows/backend_tests.yml b/.github/workflows/backend_tests.yml index 97cf7ca4..c09da5e5 100644 --- a/.github/workflows/backend_tests.yml +++ b/.github/workflows/backend_tests.yml @@ -184,9 +184,6 @@ jobs: steps: - name: Check out the repository uses: actions/checkout@v3.3.0 - with: - # Disabling shallow clone is recommended for improving relevancy of reporting in sonarcloud - fetch-depth: 0 - name: Set up Python uses: actions/setup-python@v4.2.0 with: @@ -205,9 +202,6 @@ jobs: steps: - name: Check out the repository uses: actions/checkout@v3.3.0 - with: - # Disabling shallow clone is recommended for improving relevancy of reporting in sonarcloud - fetch-depth: 0 - name: Checkout Samples uses: actions/checkout@v3 with: @@ -281,7 +275,7 @@ jobs: # so just skip everything but main if: github.ref_name == 'main' with: - projectBaseDir: spiffworkflow-frontend + projectBaseDir: spiffworkflow-backend env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} From d2f1ca1492fc3d3861a4dae16b6aaee2d0ca7062 Mon Sep 17 00:00:00 2001 From: jasquat Date: Tue, 28 Mar 2023 08:23:09 -0400 Subject: [PATCH 113/162] some more attempts to get reset working --- .../services/process_instance_processor.py | 13 ++ .../services/task_service.py | 3 +- .../unit/test_process_instance_processor.py | 162 ++++++++++++------ 3 files changed, 123 insertions(+), 55 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py 
b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 99f60a2c..c08a4531 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -1,4 +1,5 @@ """Process_instance_processor.""" +import copy import _strptime # type: ignore from sqlalchemy import or_ from sqlalchemy import and_ @@ -1346,7 +1347,12 @@ class ProcessInstanceProcessor: db.session.commit() for task_to_update in tasks_to_update: + # print(f"task_to_update: {task_to_update}") TaskService.reset_task_model(task_to_update, state="FUTURE", commit=commit) + # if task_to_update.task_definition.bpmn_identifier != 'top_level_process_script_after_gate': + # TaskService.reset_task_model(task_to_update, state='FUTURE', commit=commit) + # else: + # TaskService.reset_task_model(task_to_update, state=task_to_update.state, commit=commit) parent_task_model = TaskModel.query.filter_by(guid=to_task_model.properties_json["parent"]).first() if parent_task_model is None: @@ -1364,6 +1370,13 @@ class ProcessInstanceProcessor: for task_model in task_models_of_parent_bpmn_processes: TaskService.reset_task_model(task_model, state="WAITING", commit=commit) + bpmn_process = to_task_model.bpmn_process + properties_json = copy.copy(bpmn_process.properties_json) + properties_json['last_task'] = parent_task_model.guid + bpmn_process.properties_json = properties_json + db.session.add(bpmn_process) + db.session.commit() + if commit: processor = ProcessInstanceProcessor(process_instance) processor.save() diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index 4b86eefc..c9299925 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -1,3 +1,4 @@ +import copy import json from hashlib import sha256 from typing import Optional @@ -360,7 +361,7 @@ class TaskService: else: task_model.python_env_data_hash = python_env_data_hash - new_properties_json = task_model.properties_json + new_properties_json = copy.copy(task_model.properties_json) task_model.state = state task_model.start_in_seconds = None task_model.end_in_seconds = None diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index 37709197..7ef7b40f 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -256,60 +256,114 @@ class TestProcessInstanceProcessor(BaseTest): assert spiff_task is not None assert spiff_task.state == TaskState.COMPLETED - # TODO: FIX resetting a process instance to a task - # def test_properly_resets_process_to_given_task( - # self, - # app: Flask, - # client: FlaskClient, - # with_db_and_bpmn_file_cleanup: None, - # with_super_admin_user: UserModel, - # ) -> None: - # self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") - # initiator_user = self.find_or_create_user("initiator_user") - # finance_user_three = self.find_or_create_user("testuser3") - # assert initiator_user.principal is not None - # assert finance_user_three.principal is not None - # 
AuthorizationService.import_permissions_from_yaml_file() - # - # finance_group = GroupModel.query.filter_by(identifier="Finance Team").first() - # assert finance_group is not None - # - # process_model = load_test_spec( - # process_model_id="test_group/manual_task_with_subprocesses", - # process_model_source_directory="manual_task_with_subprocesses", - # ) - # process_instance = self.create_process_instance_from_process_model( - # process_model=process_model, user=initiator_user - # ) - # processor = ProcessInstanceProcessor(process_instance) - # processor.do_engine_steps(save=True) - # assert len(process_instance.active_human_tasks) == 1 - # initial_human_task_id = process_instance.active_human_tasks[0].id - # - # # save again to ensure we go attempt to process the human tasks again - # processor.save() - # - # assert len(process_instance.active_human_tasks) == 1 - # assert initial_human_task_id == process_instance.active_human_tasks[0].id - # - # processor = ProcessInstanceProcessor(process_instance) - # human_task_one = process_instance.active_human_tasks[0] - # spiff_manual_task = processor.__class__.get_task_by_bpmn_identifier( - # human_task_one.task_name, processor.bpmn_process_instance - # ) - # ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) - # - # processor.suspend() - # ProcessInstanceProcessor.reset_process(process_instance, str(spiff_manual_task.id), commit=True) - # - # process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() - # processor = ProcessInstanceProcessor(process_instance) - # human_task_one = process_instance.active_human_tasks[0] - # spiff_manual_task = processor.bpmn_process_instance.get_task(UUID(human_task_one.task_id)) - # ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) - # human_task_one = process_instance.active_human_tasks[0] - # spiff_manual_task = processor.bpmn_process_instance.get_task(UUID(human_task_one.task_id)) - # ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) + def test_properly_resets_process_to_given_task( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") + initiator_user = self.find_or_create_user("initiator_user") + finance_user_three = self.find_or_create_user("testuser3") + assert initiator_user.principal is not None + assert finance_user_three.principal is not None + AuthorizationService.import_permissions_from_yaml_file() + + finance_group = GroupModel.query.filter_by(identifier="Finance Team").first() + assert finance_group is not None + + process_model = load_test_spec( + process_model_id="test_group/manual_task", + process_model_source_directory="manual_task", + ) + process_instance = self.create_process_instance_from_process_model( + process_model=process_model, user=initiator_user + ) + processor = ProcessInstanceProcessor(process_instance) + processor.do_engine_steps(save=True) + assert len(process_instance.active_human_tasks) == 1 + initial_human_task_id = process_instance.active_human_tasks[0].id + + # save again to ensure we go attempt to process the human tasks again + processor.save() + + assert len(process_instance.active_human_tasks) == 1 + assert initial_human_task_id == process_instance.active_human_tasks[0].id + + processor = 
ProcessInstanceProcessor(process_instance) + human_task_one = process_instance.active_human_tasks[0] + spiff_manual_task = processor.__class__.get_task_by_bpmn_identifier( + human_task_one.task_name, processor.bpmn_process_instance + ) + + processor.suspend() + ProcessInstanceProcessor.reset_process(process_instance, str(spiff_manual_task.parent.id), commit=True) + + process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() + processor = ProcessInstanceProcessor(process_instance) + processor.resume() + processor.do_engine_steps(save=True) + human_task_one = process_instance.active_human_tasks[0] + spiff_manual_task = processor.bpmn_process_instance.get_task(UUID(human_task_one.task_id)) + ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) + assert process_instance.status == "complete" + + def test_properly_resets_process_to_given_task_with_call_activity( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") + initiator_user = self.find_or_create_user("initiator_user") + finance_user_three = self.find_or_create_user("testuser3") + assert initiator_user.principal is not None + assert finance_user_three.principal is not None + AuthorizationService.import_permissions_from_yaml_file() + + finance_group = GroupModel.query.filter_by(identifier="Finance Team").first() + assert finance_group is not None + + process_model = load_test_spec( + process_model_id="test_group/manual_task_with_subprocesses", + process_model_source_directory="manual_task_with_subprocesses", + ) + process_instance = self.create_process_instance_from_process_model( + process_model=process_model, user=initiator_user + ) + processor = ProcessInstanceProcessor(process_instance) + processor.do_engine_steps(save=True) + import pdb; pdb.set_trace() + assert len(process_instance.active_human_tasks) == 1 + initial_human_task_id = process_instance.active_human_tasks[0].id + + # save again to ensure we go attempt to process the human tasks again + processor.save() + + assert len(process_instance.active_human_tasks) == 1 + assert initial_human_task_id == process_instance.active_human_tasks[0].id + + processor = ProcessInstanceProcessor(process_instance) + human_task_one = process_instance.active_human_tasks[0] + spiff_manual_task = processor.__class__.get_task_by_bpmn_identifier( + human_task_one.task_name, processor.bpmn_process_instance + ) + + processor.suspend() + ProcessInstanceProcessor.reset_process(process_instance, str(spiff_manual_task.parent.id), commit=True) + import pdb; pdb.set_trace() + + process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() + processor = ProcessInstanceProcessor(process_instance) + processor.resume() + processor.do_engine_steps(save=True) + import pdb; pdb.set_trace() + human_task_one = process_instance.active_human_tasks[0] + spiff_manual_task = processor.bpmn_process_instance.get_task(UUID(human_task_one.task_id)) + ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) + assert process_instance.status == "complete" def test_properly_saves_tasks_when_running( self, From 3ea5d9a0556ca7782a89ec14500bb65b4e2fe412 Mon Sep 17 00:00:00 2001 From: burnettk Date: Tue, 28 Mar 2023 10:05:29 -0400 Subject: [PATCH 114/162] fix all sonar bugs --- 
.../routes/openid_blueprint/static/login.css | 2 +- .../routes/openid_blueprint/templates/login.html | 4 ++-- .../services/process_instance_service.py | 3 +-- 3 files changed, 4 insertions(+), 5 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/static/login.css b/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/static/login.css index 15b093f6..94d1e057 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/static/login.css +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/static/login.css @@ -2,7 +2,7 @@ margin: 0; padding: 0; background-color:white; - font-family: 'Arial'; + font-family: 'Arial, sans-serif'; } header { width: 100%; diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/templates/login.html b/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/templates/login.html index 858355c3..815275d2 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/templates/login.html +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/templates/login.html @@ -1,12 +1,12 @@ - + Login Form
[login.html markup lost in extraction; the surviving strings from this hunk are the "Small SpiffWorkflow logo" image alt text and the "Login" button label]
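The process_instance_service.py hunk just below drops an expression whose value was never read — a typical Sonar "useless statement" finding — while keeping the lookup call for its side effect. A minimal, self-contained sketch of that cleanup pattern; ProcessModelStub and fetch_model are hypothetical stand-ins, not the project's real API:

from dataclasses import dataclass


@dataclass
class ProcessModelStub:
    display_name: str


def fetch_model(identifier: str) -> ProcessModelStub:
    # hypothetical stand-in for process_model_service.get_process_model(...)
    return ProcessModelStub(display_name=identifier.title())


def status_before(identifier: str) -> None:
    process_model = fetch_model(identifier)
    # value computed and immediately discarded: the dead code Sonar flags
    process_model.display_name if process_model else ""


def status_after(identifier: str) -> None:
    # keep only the call, which may still matter for its lookup side effect
    fetch_model(identifier)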
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py index 37f77ac1..711ea0d8 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py @@ -157,8 +157,7 @@ class ProcessInstanceService: # navigation = processor.bpmn_process_instance.get_deep_nav_list() # ProcessInstanceService.update_navigation(navigation, processor) process_model_service = ProcessModelService() - process_model = process_model_service.get_process_model(processor.process_model_identifier) - process_model.display_name if process_model else "" + process_model_service.get_process_model(processor.process_model_identifier) process_instance_api = ProcessInstanceApi( id=processor.get_process_instance_id(), status=processor.get_status(), From e20ada5d496b4c9798ee9de07f5337ab7f01ddbe Mon Sep 17 00:00:00 2001 From: burnettk Date: Tue, 28 Mar 2023 10:33:53 -0400 Subject: [PATCH 115/162] get backend working with new spiff --- spiffworkflow-backend/poetry.lock | 2 +- .../services/process_instance_processor.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/spiffworkflow-backend/poetry.lock b/spiffworkflow-backend/poetry.lock index b96b8d78..ef67a53e 100644 --- a/spiffworkflow-backend/poetry.lock +++ b/spiffworkflow-backend/poetry.lock @@ -1895,7 +1895,7 @@ lxml = "*" type = "git" url = "https://github.com/sartography/SpiffWorkflow" reference = "main" -resolved_reference = "3c3345c85dd7f3b7112ad04aaa6487abbd2e9414" +resolved_reference = "62454c99c3a711c38f4249a3b5e7215d42037d72" [[package]] name = "SQLAlchemy" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 7e3028fd..6d236ef6 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -1779,7 +1779,7 @@ class ProcessInstanceProcessor: ) task_model.start_in_seconds = time.time() - self.bpmn_process_instance.complete_task_from_id(spiff_task.id) + self.bpmn_process_instance.run_task_from_id(spiff_task.id) task_model.end_in_seconds = time.time() human_task.completed_by_user_id = user.id From dc005c36dc2e489a64d11a4e23495fc478ed8c3b Mon Sep 17 00:00:00 2001 From: burnettk Date: Tue, 28 Mar 2023 11:22:42 -0400 Subject: [PATCH 116/162] kill commented out code --- spiffworkflow-backend/conftest.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/spiffworkflow-backend/conftest.py b/spiffworkflow-backend/conftest.py index 9d05dfe5..4e902e22 100644 --- a/spiffworkflow-backend/conftest.py +++ b/spiffworkflow-backend/conftest.py @@ -19,8 +19,6 @@ from spiffworkflow_backend.services.process_instance_service import ( ) from spiffworkflow_backend.services.process_model_service import ProcessModelService -# from tests.spiffworkflow_backend.helpers.test_data import load_test_spec - # We need to call this before importing spiffworkflow_backend # otherwise typeguard cannot work. 
hence the noqa: E402 From 293d3a9295fca062a3f89bdc58f95602d70085ec Mon Sep 17 00:00:00 2001 From: burnettk Date: Tue, 28 Mar 2023 13:41:58 -0400 Subject: [PATCH 117/162] notion 183: Home Page Refresh, keep it refreshing for 4 hours instead of 10 minutes --- .../src/components/ProcessInstanceListTable.tsx | 9 ++++----- .../src/components/TaskListTable.tsx | 10 +++++++--- spiffworkflow-frontend/src/helpers.tsx | 8 ++++++++ spiffworkflow-frontend/src/routes/MyTasks.tsx | 10 +++++++--- 4 files changed, 26 insertions(+), 11 deletions(-) diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx index 870dc440..81b644d1 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -36,6 +36,8 @@ import { getProcessModelFullIdentifierFromSearchParams, modifyProcessIdentifierForPathParam, refreshAtInterval, + REFRESH_INTERVAL_SECONDS, + REFRESH_TIMEOUT_SECONDS, } from '../helpers'; import { useUriListForPermissions } from '../hooks/UriListForPermissions'; @@ -68,9 +70,6 @@ import useAPIError from '../hooks/UseApiError'; import { usePermissionFetcher } from '../hooks/PermissionService'; import { Can } from '../contexts/Can'; -const REFRESH_INTERVAL = 5; -const REFRESH_TIMEOUT = 600; - type OwnProps = { filtersEnabled?: boolean; processModelFullIdentifier?: string; @@ -389,8 +388,8 @@ export default function ProcessInstanceListTable({ checkFiltersAndRun(); if (autoReload) { return refreshAtInterval( - REFRESH_INTERVAL, - REFRESH_TIMEOUT, + REFRESH_INTERVAL_SECONDS, + REFRESH_TIMEOUT_SECONDS, checkFiltersAndRun ); } diff --git a/spiffworkflow-frontend/src/components/TaskListTable.tsx b/spiffworkflow-frontend/src/components/TaskListTable.tsx index 1951b4cc..b6901f73 100644 --- a/spiffworkflow-frontend/src/components/TaskListTable.tsx +++ b/spiffworkflow-frontend/src/components/TaskListTable.tsx @@ -9,14 +9,14 @@ import { getPageInfoFromSearchParams, modifyProcessIdentifierForPathParam, refreshAtInterval, + REFRESH_INTERVAL_SECONDS, + REFRESH_TIMEOUT_SECONDS, } from '../helpers'; import HttpService from '../services/HttpService'; import { PaginationObject, ProcessInstanceTask } from '../interfaces'; import TableCellWithTimeAgoInWords from './TableCellWithTimeAgoInWords'; const PER_PAGE_FOR_TASKS_ON_HOME_PAGE = 5; -const REFRESH_INTERVAL = 5; -const REFRESH_TIMEOUT = 600; type OwnProps = { apiPath: string; @@ -89,7 +89,11 @@ export default function TaskListTable({ }; getTasks(); if (autoReload) { - return refreshAtInterval(REFRESH_INTERVAL, REFRESH_TIMEOUT, getTasks); + return refreshAtInterval( + REFRESH_INTERVAL_SECONDS, + REFRESH_TIMEOUT_SECONDS, + getTasks + ); } return undefined; }, [ diff --git a/spiffworkflow-frontend/src/helpers.tsx b/spiffworkflow-frontend/src/helpers.tsx index 88ab1522..273e12ae 100644 --- a/spiffworkflow-frontend/src/helpers.tsx +++ b/spiffworkflow-frontend/src/helpers.tsx @@ -270,3 +270,11 @@ export const encodeBase64 = (data: string) => { export const decodeBase64 = (data: string) => { return Buffer.from(data, 'base64').toString('ascii'); }; + +const MINUTES_IN_HOUR = 60; +const SECONDS_IN_MINUTE = 60; +const SECONDS_IN_HOUR = MINUTES_IN_HOUR * SECONDS_IN_MINUTE; +const FOUR_HOURS_IN_SECONDS = SECONDS_IN_HOUR * 4; + +export const REFRESH_INTERVAL_SECONDS = 5; +export const REFRESH_TIMEOUT_SECONDS = FOUR_HOURS_IN_SECONDS; diff --git a/spiffworkflow-frontend/src/routes/MyTasks.tsx 
b/spiffworkflow-frontend/src/routes/MyTasks.tsx index 3daaaef6..810c811b 100644 --- a/spiffworkflow-frontend/src/routes/MyTasks.tsx +++ b/spiffworkflow-frontend/src/routes/MyTasks.tsx @@ -8,6 +8,8 @@ import { getPageInfoFromSearchParams, modifyProcessIdentifierForPathParam, refreshAtInterval, + REFRESH_INTERVAL_SECONDS, + REFRESH_TIMEOUT_SECONDS, } from '../helpers'; import HttpService from '../services/HttpService'; import { @@ -19,8 +21,6 @@ import { import ProcessInstanceRun from '../components/ProcessInstanceRun'; const PER_PAGE_FOR_TASKS_ON_HOME_PAGE = 5; -const REFRESH_INTERVAL = 5; -const REFRESH_TIMEOUT = 600; export default function MyTasks() { const [searchParams] = useSearchParams(); @@ -46,7 +46,11 @@ export default function MyTasks() { }; getTasks(); - refreshAtInterval(REFRESH_INTERVAL, REFRESH_TIMEOUT, getTasks); + refreshAtInterval( + REFRESH_INTERVAL_SECONDS, + REFRESH_TIMEOUT_SECONDS, + getTasks + ); }, [searchParams]); const processInstanceRunResultTag = () => { From c86ea0349c939776b0fea952761e926ae5cf5a82 Mon Sep 17 00:00:00 2001 From: burnettk Date: Tue, 28 Mar 2023 14:51:08 -0400 Subject: [PATCH 118/162] fix four of five safety issues --- spiffworkflow-backend/poetry.lock | 52 ++++++++++++++-------------- spiffworkflow-backend/pyproject.toml | 1 + 2 files changed, 27 insertions(+), 26 deletions(-) diff --git a/spiffworkflow-backend/poetry.lock b/spiffworkflow-backend/poetry.lock index ef67a53e..cdbabe03 100644 --- a/spiffworkflow-backend/poetry.lock +++ b/spiffworkflow-backend/poetry.lock @@ -493,7 +493,7 @@ python-versions = ">=3.7" name = "dparse" version = "0.6.2" description = "A parser for Python dependency files" -category = "dev" +category = "main" optional = false python-versions = ">=3.5" @@ -824,9 +824,9 @@ python-versions = ">=3.6" smmap = ">=3.0.1,<6" [[package]] -name = "GitPython" -version = "3.1.29" -description = "GitPython is a python library used to interact with Git repositories" +name = "gitpython" +version = "3.1.31" +description = "GitPython is a Python library used to interact with Git repositories" category = "dev" optional = false python-versions = ">=3.7" @@ -1582,7 +1582,7 @@ docutils = ">=0.11,<1.0" name = "ruamel.yaml" version = "0.17.21" description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" -category = "dev" +category = "main" optional = false python-versions = ">=3" @@ -1597,22 +1597,22 @@ jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] name = "ruamel-yaml-clib" version = "0.2.7" description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" -category = "dev" +category = "main" optional = false python-versions = ">=3.5" [[package]] name = "safety" -version = "2.3.1" +version = "2.3.5" description = "Checks installed dependencies for known vulnerabilities and licenses." 
-category = "dev" +category = "main" optional = false python-versions = "*" [package.dependencies] Click = ">=8.0.2" dparse = ">=0.6.2" -packaging = ">=21.0" +packaging = ">=21.0,<22.0" requests = "*" "ruamel.yaml" = ">=0.17.21" setuptools = ">=19.3" @@ -1660,15 +1660,15 @@ tornado = ["tornado (>=5)"] [[package]] name = "setuptools" -version = "65.5.0" +version = "67.6.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "main" optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mock", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] @@ -1982,7 +1982,7 @@ python-versions = ">=3.6.1" name = "toml" version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" -category = "dev" +category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" @@ -2200,8 +2200,8 @@ optional = false python-versions = "*" [[package]] -name = "Werkzeug" -version = "2.2.2" +name = "werkzeug" +version = "2.2.3" description = "The comprehensive WSGI web application library." 
category = "main" optional = false @@ -2274,7 +2274,7 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools" [metadata] lock-version = "1.1" python-versions = ">=3.9,<3.12" -content-hash = "b9ea32912509637f1378d060771de7548d93953aa3db12d6a48098f7dc15205f" +content-hash = "ae016b86fb6700dd70f5724d85a573dc39d29e4778b398eb5e82edb41070cd89" [metadata.files] alabaster = [ @@ -2672,9 +2672,9 @@ gitdb = [ {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, ] -GitPython = [ - {file = "GitPython-3.1.29-py3-none-any.whl", hash = "sha256:41eea0deec2deea139b459ac03656f0dd28fc4a3387240ec1d3c259a2c47850f"}, - {file = "GitPython-3.1.29.tar.gz", hash = "sha256:cc36bfc4a3f913e66805a28e84703e419d9c264c1077e537b54f0e1af85dbefd"}, +gitpython = [ + {file = "GitPython-3.1.31-py3-none-any.whl", hash = "sha256:f04893614f6aa713a60cbbe1e6a97403ef633103cdd0ef5eb6efe0deb98dbe8d"}, + {file = "GitPython-3.1.31.tar.gz", hash = "sha256:8ce3bcf69adfdf7c7d503e78fd3b1c492af782d58893b650adb2ac8912ddd573"}, ] greenlet = [ {file = "greenlet-2.0.1-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:9ed358312e63bf683b9ef22c8e442ef6c5c02973f0c2a939ec1d7b50c974015c"}, @@ -3393,16 +3393,16 @@ ruamel-yaml-clib = [ {file = "ruamel.yaml.clib-0.2.7.tar.gz", hash = "sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497"}, ] safety = [ - {file = "safety-2.3.1-py3-none-any.whl", hash = "sha256:8f098d12b607db2756886280e85c28ece8db1bba4f45fc5f981f4663217bd619"}, - {file = "safety-2.3.1.tar.gz", hash = "sha256:6e6fcb7d4e8321098cf289f59b65051cafd3467f089c6e57c9f894ae32c23b71"}, + {file = "safety-2.3.5-py3-none-any.whl", hash = "sha256:2227fcac1b22b53c1615af78872b48348661691450aa25d6704a5504dbd1f7e2"}, + {file = "safety-2.3.5.tar.gz", hash = "sha256:a60c11f8952f412cbb165d70cb1f673a3b43a2ba9a93ce11f97e6a4de834aa3a"}, ] sentry-sdk = [ {file = "sentry-sdk-1.16.0.tar.gz", hash = "sha256:a900845bd78c263d49695d48ce78a4bce1030bbd917e0b6cc021fc000c901113"}, {file = "sentry_sdk-1.16.0-py2.py3-none-any.whl", hash = "sha256:633edefead34d976ff22e7edc367cdf57768e24bc714615ccae746d9d91795ae"}, ] setuptools = [ - {file = "setuptools-65.5.0-py3-none-any.whl", hash = "sha256:f62ea9da9ed6289bfe868cd6845968a2c854d1427f8548d52cae02a42b4f0356"}, - {file = "setuptools-65.5.0.tar.gz", hash = "sha256:512e5536220e38146176efb833d4a62aa726b7bbff82cfbc8ba9eaa3996e0b17"}, + {file = "setuptools-67.6.1-py3-none-any.whl", hash = "sha256:e728ca814a823bf7bf60162daf9db95b93d532948c4c0bea762ce62f60189078"}, + {file = "setuptools-67.6.1.tar.gz", hash = "sha256:257de92a9d50a60b8e22abfcbb771571fde0dbf3ec234463212027a4eeecbe9a"}, ] simplejson = [ {file = "simplejson-3.17.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a89acae02b2975b1f8e4974cb8cdf9bf9f6c91162fb8dec50c259ce700f2770a"}, @@ -3685,9 +3685,9 @@ wcwidth = [ {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, ] -Werkzeug = [ - {file = "Werkzeug-2.2.2-py3-none-any.whl", hash = "sha256:f979ab81f58d7318e064e99c4506445d60135ac5cd2e177a2de0089bfd4c9bd5"}, - {file = "Werkzeug-2.2.2.tar.gz", hash = "sha256:7ea2d48322cc7c0f8b3a215ed73eabd7b5d75d0b50e31ab006286ccff9e00b8f"}, +werkzeug = [ + {file = 
"Werkzeug-2.2.3-py3-none-any.whl", hash = "sha256:56433961bc1f12533306c624f3be5e744389ac61d722175d543e1751285da612"}, + {file = "Werkzeug-2.2.3.tar.gz", hash = "sha256:2e1ccc9417d4da358b9de6f174e3ac094391ea1d4fbef2d667865d819dfd0afe"}, ] wrapt = [ {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"}, diff --git a/spiffworkflow-backend/pyproject.toml b/spiffworkflow-backend/pyproject.toml index 103de8c8..5758b46a 100644 --- a/spiffworkflow-backend/pyproject.toml +++ b/spiffworkflow-backend/pyproject.toml @@ -75,6 +75,7 @@ flask-jwt-extended = "^4.4.4" pylint = "^2.15.10" flask-simple-crypt = "^0.3.3" cryptography = "^39.0.2" +safety = "^2.3.5" [tool.poetry.dev-dependencies] From 891cae52f1def0ca36555be068cf4e42131c5561 Mon Sep 17 00:00:00 2001 From: burnettk Date: Tue, 28 Mar 2023 14:58:16 -0400 Subject: [PATCH 119/162] downgrade setuptools closer to where it was to avoid deprecation warnings --- spiffworkflow-backend/poetry.lock | 10 +++++----- spiffworkflow-backend/pyproject.toml | 7 +++++++ 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/spiffworkflow-backend/poetry.lock b/spiffworkflow-backend/poetry.lock index cdbabe03..06d02301 100644 --- a/spiffworkflow-backend/poetry.lock +++ b/spiffworkflow-backend/poetry.lock @@ -1660,14 +1660,14 @@ tornado = ["tornado (>=5)"] [[package]] name = "setuptools" -version = "67.6.1" +version = "65.5.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "main" optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] @@ -2274,7 +2274,7 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools" [metadata] lock-version = "1.1" python-versions = ">=3.9,<3.12" -content-hash = "ae016b86fb6700dd70f5724d85a573dc39d29e4778b398eb5e82edb41070cd89" +content-hash = "723a73cc0d56d4bfe4a791356cf36675a6bcf289a4959abc77c5512738b88d51" [metadata.files] alabaster = [ @@ -3401,8 +3401,8 @@ sentry-sdk = [ {file = "sentry_sdk-1.16.0-py2.py3-none-any.whl", hash = "sha256:633edefead34d976ff22e7edc367cdf57768e24bc714615ccae746d9d91795ae"}, ] setuptools = [ - {file = "setuptools-67.6.1-py3-none-any.whl", hash = 
"sha256:e728ca814a823bf7bf60162daf9db95b93d532948c4c0bea762ce62f60189078"}, - {file = "setuptools-67.6.1.tar.gz", hash = "sha256:257de92a9d50a60b8e22abfcbb771571fde0dbf3ec234463212027a4eeecbe9a"}, + {file = "setuptools-65.5.1-py3-none-any.whl", hash = "sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31"}, + {file = "setuptools-65.5.1.tar.gz", hash = "sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f"}, ] simplejson = [ {file = "simplejson-3.17.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a89acae02b2975b1f8e4974cb8cdf9bf9f6c91162fb8dec50c259ce700f2770a"}, diff --git a/spiffworkflow-backend/pyproject.toml b/spiffworkflow-backend/pyproject.toml index 5758b46a..46ebc435 100644 --- a/spiffworkflow-backend/pyproject.toml +++ b/spiffworkflow-backend/pyproject.toml @@ -39,6 +39,13 @@ pytest-flask = "^1.2.0" pytest-flask-sqlalchemy = "^1.1.0" psycopg2 = "^2.9.3" typing-extensions = "^4.4.0" + +# pinned to higher than 65.5.0 because of a vulnerability +# and to lower than 67 because i didn't feel like addressing +# new deprecation warnings. we don't need this library explicitly, +# but at one time it was pulled in by various libs we depend on. +setuptools = "^65.5.1" + connexion = {extras = [ "swagger-ui",], version = "^2"} lxml = "^4.9.1" marshmallow-enum = "^1.5.1" From 8bd7716d4c5067ff5e0e6e6ff02981e81dcc4c7b Mon Sep 17 00:00:00 2001 From: jasquat Date: Tue, 28 Mar 2023 15:07:31 -0400 Subject: [PATCH 120/162] some more debugging --- .../services/process_instance_processor.py | 1 + .../services/task_service.py | 21 ++--- .../services/workflow_execution_service.py | 19 ++++- .../manual_task_with_subprocesses.bpmn | 83 +++++++++++-------- .../unit/test_process_instance_processor.py | 25 +++--- 5 files changed, 82 insertions(+), 67 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 0ec6c106..7d33fc4e 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -1349,6 +1349,7 @@ class ProcessInstanceProcessor: for task_to_update in tasks_to_update: # print(f"task_to_update: {task_to_update}") TaskService.reset_task_model(task_to_update, state="FUTURE", commit=commit) + # TaskService.reset_task_model(task_to_update, state=task_to_update.state, commit=commit) # if task_to_update.task_definition.bpmn_identifier != 'top_level_process_script_after_gate': # TaskService.reset_task_model(task_to_update, state='FUTURE', commit=commit) # else: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index be6407b5..a67a7755 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -59,6 +59,8 @@ class TaskService: It also returns the relating json_data object so they can be imported later. 
""" new_properties_json = serializer.task_to_dict(spiff_task) + if new_properties_json["task_spec"] == "Start": + new_properties_json["parent"] = None spiff_task_data = new_properties_json.pop("data") python_env_data_dict = cls._get_python_env_data_dict_from_spiff_task(spiff_task, serializer) task_model.properties_json = new_properties_json @@ -251,11 +253,7 @@ class TaskService: # bpmn process defintion so let's avoid using it. if task_properties["task_spec"] == "Root": continue - if task_properties["task_spec"] == "Start": - task_properties["parent"] = None - task_data_dict = task_properties.pop("data") - state_int = task_properties["state"] spiff_task = spiff_workflow.get_task_from_id(UUID(task_id)) task_model = TaskModel.query.filter_by(guid=task_id).first() @@ -266,23 +264,14 @@ class TaskService: spiff_task, bpmn_definition_to_task_definitions_mappings, ) - task_model.state = TaskStateNames[state_int] - task_model.properties_json = task_properties - new_task_models[task_model.guid] = task_model - json_data_dict = TaskService.update_task_data_on_task_model( - task_model, task_data_dict, "json_data_hash" - ) + json_data_dict, python_env_dict = cls.update_task_model(task_model, spiff_task, serializer) + + new_task_models[task_model.guid] = task_model if json_data_dict is not None: new_json_data_dicts[json_data_dict["hash"]] = json_data_dict - - python_env_data_dict = cls._get_python_env_data_dict_from_spiff_task(spiff_task, serializer) - python_env_dict = TaskService.update_task_data_on_task_model( - task_model, python_env_data_dict, "python_env_data_hash" - ) if python_env_dict is not None: new_json_data_dicts[python_env_dict["hash"]] = python_env_dict - return (bpmn_process, new_task_models, new_json_data_dicts) @classmethod diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py index ac0f1a41..cc6a3a02 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py @@ -11,6 +11,8 @@ from SpiffWorkflow.task import Task as SpiffTask # type: ignore from SpiffWorkflow.task import TaskState from spiffworkflow_backend.exceptions.api_error import ApiError +from spiffworkflow_backend.models import task_definition +from spiffworkflow_backend.models.bpmn_process_definition import BpmnProcessDefinitionModel from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.message_instance import MessageInstanceModel from spiffworkflow_backend.models.message_instance_correlation import ( @@ -19,7 +21,8 @@ from spiffworkflow_backend.models.message_instance_correlation import ( from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventModel from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType -from spiffworkflow_backend.models.task import TaskModel # noqa: F401 +from spiffworkflow_backend.models.task import TaskModel +from spiffworkflow_backend.models.task_definition import TaskDefinitionModel # noqa: F401 from spiffworkflow_backend.services.assertion_service import safe_assertion from spiffworkflow_backend.services.process_instance_lock_service import ( ProcessInstanceLockService, @@ -93,6 +96,7 @@ class TaskModelSavingDelegate(EngineStepDelegate): failing_spiff_task = 
script_engine.failing_spiff_task
             self._update_task_model_with_spiff_task(failing_spiff_task, task_failed=True)
 
+        # import pdb; pdb.set_trace()
         db.session.bulk_save_objects(self.task_models.values())
         db.session.bulk_save_objects(self.process_instance_events.values())
@@ -123,7 +127,7 @@ class TaskModelSavingDelegate(EngineStepDelegate):
     def _process_spiff_task_parents(self, spiff_task: SpiffTask) -> None:
         (parent_subprocess_guid, _parent_subprocess) = TaskService.task_subprocess(spiff_task)
         if parent_subprocess_guid is not None:
-            spiff_task_of_parent_subprocess = spiff_task.workflow._get_outermost_workflow().get_task(
+            spiff_task_of_parent_subprocess = spiff_task.workflow._get_outermost_workflow().get_task_from_id(
                 UUID(parent_subprocess_guid)
             )
@@ -156,6 +160,17 @@ class TaskModelSavingDelegate(EngineStepDelegate):
             bpmn_process_json_data = TaskService.update_task_data_on_bpmn_process(
                 bpmn_process or task_model.bpmn_process, spiff_task.workflow.data
             )
+            # stp = False
+            # for ntm in new_task_models.values():
+            #     td = TaskDefinitionModel.query.filter_by(id=ntm.task_definition_id).first()
+            #     if td.bpmn_identifier == 'Start':
+            #         # import pdb; pdb.set_trace()
+            #         stp = True
+            #         print("HEY")
+
+            # if stp:
+            #     # import pdb; pdb.set_trace()
+            #     print("HEY2")
             self.task_models.update(new_task_models)
             self.json_data_dicts.update(new_json_data_dicts)
             json_data_dict_list = TaskService.update_task_model(task_model, spiff_task, self.serializer)
diff --git a/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn b/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn
index f49f99cd..d2b1d94e 100644
--- a/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn
+++ b/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn
[BPMN XML hunks not reproduced: the element markup was lost in extraction and
cannot be reconstructed faithfully. The surviving text nodes show the
"## Hello" manual task, the top-level scripts (set_in_top_level_script = 1,
set_top_level_process_script_after_gate = 1), a new sequence flow
Flow_0q30935 wired in alongside Flow_1fktmf7, and rewritten diagram
coordinates.]
diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py
index b85bbec3..0675394b 100644
--- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py
+++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py
@@ -1,5 +1,6 @@
 """Test_process_instance_processor."""
 from uuid import UUID
+import json
 
 import pytest
 from flask import g
@@ -305,7 +306,7 @@
     processor.resume()
     processor.do_engine_steps(save=True)
     human_task_one = process_instance.active_human_tasks[0]
-    spiff_manual_task = processor.bpmn_process_instance.get_task(UUID(human_task_one.task_id))
+    spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id))
     ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one)
     assert process_instance.status == "complete"
@@ -335,34 +336,30 @@
     )
     processor = ProcessInstanceProcessor(process_instance)
    processor.do_engine_steps(save=True)
-    import pdb; pdb.set_trace()
+    with open("before_reset.json", "w") as f:
+        f.write(json.dumps(processor.serialize(), indent=2))
     assert len(process_instance.active_human_tasks) == 1
     initial_human_task_id = process_instance.active_human_tasks[0].id
-
-    # save again to ensure we go attempt to process the human tasks again
-    processor.save()
-    assert len(process_instance.active_human_tasks) == 1
     assert initial_human_task_id == process_instance.active_human_tasks[0].id
 
-    processor = ProcessInstanceProcessor(process_instance)
     human_task_one = process_instance.active_human_tasks[0]
-    spiff_manual_task = processor.__class__.get_task_by_bpmn_identifier(
-        human_task_one.task_name, processor.bpmn_process_instance
-    )
+    spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id))
+    ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one)
+    processor.suspend()
-    ProcessInstanceProcessor.reset_process(process_instance, str(spiff_manual_task.parent.id), commit=True)
-    import pdb; pdb.set_trace()
+    ProcessInstanceProcessor.reset_process(process_instance, str(spiff_manual_task.id), commit=True)
 
     process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first()
     processor = ProcessInstanceProcessor(process_instance)
+    with open("after_reset.json", "w") as f:
+        f.write(json.dumps(processor.serialize(), indent=2))
     processor.resume()
     processor.do_engine_steps(save=True)
-    import pdb; pdb.set_trace()
     human_task_one = process_instance.active_human_tasks[0]
-    spiff_manual_task = processor.bpmn_process_instance.get_task(UUID(human_task_one.task_id))
+    spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id))
     ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one)
     assert process_instance.status == "complete"

From f6f0677d34e8ebc38f5e3f2c78858ec3a28e9f38 Mon Sep 17 00:00:00 2001
From: jasquat
Date: Tue, 28 Mar 2023 15:56:00 -0400
Subject: [PATCH 121/162] updated sqlalchemy to 2.0 due to safety complaints w/
 burnettk

---
 spiffworkflow-backend/conftest.py             |    4 +-
 spiffworkflow-backend/poetry.lock             | 1892 +++++++++--------
 spiffworkflow-backend/pyproject.toml          |    4 +-
 .../src/spiffworkflow_backend/__init__.py     |    5 +-
 .../models/process_instance.py                |    1 +
 .../models/process_instance_report.py         |    3 +-
 .../src/spiffworkflow_backend/models/task.py  |    1 +
 .../process_instance_report_service.py        |    9 +-
 .../services/workflow_execution_service.py    |    5 -
 9 files changed, 996 insertions(+), 928 deletions(-)

diff --git a/spiffworkflow-backend/conftest.py b/spiffworkflow-backend/conftest.py
index 4e902e22..df002ff4 100644
--- a/spiffworkflow-backend/conftest.py
+++ b/spiffworkflow-backend/conftest.py
@@ -45,8 +45,8 @@ def app() -> Flask:
 def with_db_and_bpmn_file_cleanup() -> None:
     """Do it cleanly!"""
     meta = db.metadata
-    db.session.execute(db.update(BpmnProcessModel, values={"top_level_process_id": None}))
-    db.session.execute(db.update(BpmnProcessModel, values={"direct_parent_process_id": None}))
+    db.session.execute(db.update(BpmnProcessModel).values(top_level_process_id=None))
+    db.session.execute(db.update(BpmnProcessModel).values(direct_parent_process_id=None))
 
     for table in reversed(meta.sorted_tables):
         db.session.execute(table.delete())
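For context on the conftest.py hunk above: SQLAlchemy 2.0 drops the 1.x
calling convention `update(Model, values={...})` in favor of the generative
`update(Model).values(...)` form, which is what this change tracks. A minimal
sketch of the migration, using plain SQLAlchemy 2.0 rather than the
flask-sqlalchemy `db` wrapper; the `Widget` model and the in-memory SQLite
engine are illustrative only, not part of this repo:

    from typing import Optional

    from sqlalchemy import Integer, String, create_engine, update
    from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


    class Base(DeclarativeBase):
        pass


    class Widget(Base):
        # Illustrative model; not part of this codebase.
        __tablename__ = "widget"
        id: Mapped[int] = mapped_column(Integer, primary_key=True)
        owner_id: Mapped[Optional[str]] = mapped_column(String(50))


    engine = create_engine("sqlite://")  # throwaway in-memory database
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        # 1.x style, removed in 2.0:
        #   session.execute(update(Widget, values={"owner_id": None}))
        # 2.0 style: build the statement generatively, then execute it.
        session.execute(update(Widget).values(owner_id=None))
        session.commit()

With flask-sqlalchemy, `db.update` is the same construct reached through the
extension object, as the hunk above shows.

diff --git a/spiffworkflow-backend/poetry.lock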
b/spiffworkflow-backend/poetry.lock index 06d02301..7181d042 100644 --- a/spiffworkflow-backend/poetry.lock +++ b/spiffworkflow-backend/poetry.lock @@ -1,14 +1,14 @@ [[package]] name = "alabaster" -version = "0.7.12" +version = "0.7.13" description = "A configurable sidebar-enabled Sphinx theme" category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6" [[package]] name = "alembic" -version = "1.8.1" +version = "1.10.2" description = "A database migration tool for SQLAlchemy." category = "main" optional = false @@ -17,6 +17,7 @@ python-versions = ">=3.7" [package.dependencies] Mako = "*" SQLAlchemy = ">=1.3.0" +typing-extensions = ">=4" [package.extras] tz = ["python-dateutil"] @@ -45,11 +46,11 @@ dev = ["black", "coverage", "isort", "pre-commit", "pyenchant", "pylint"] [[package]] name = "apscheduler" -version = "3.9.1.post1" +version = "3.10.1" description = "In-process task scheduler with Cron-like capabilities" category = "main" optional = false -python-versions = "!=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" +python-versions = ">=3.6" [package.dependencies] pytz = "*" @@ -58,21 +59,20 @@ six = ">=1.4.0" tzlocal = ">=2.0,<3.0.0 || >=4.0.0" [package.extras] -asyncio = ["trollius"] doc = ["sphinx", "sphinx-rtd-theme"] gevent = ["gevent"] mongodb = ["pymongo (>=3.0)"] redis = ["redis (>=3.0)"] rethinkdb = ["rethinkdb (>=2.4.0)"] -sqlalchemy = ["sqlalchemy (>=0.8)"] -testing = ["mock", "pytest", "pytest-asyncio", "pytest-asyncio (<0.6)", "pytest-cov", "pytest-tornado5"] +sqlalchemy = ["sqlalchemy (>=1.4)"] +testing = ["pytest", "pytest-asyncio", "pytest-cov", "pytest-tornado5"] tornado = ["tornado (>=4.3)"] twisted = ["twisted"] zookeeper = ["kazoo"] [[package]] name = "astroid" -version = "2.13.3" +version = "2.15.1" description = "An abstract syntax tree for Python with inference support." 
category = "main" optional = false @@ -88,28 +88,26 @@ wrapt = [ [[package]] name = "attrs" -version = "22.1.0" +version = "22.2.0" description = "Classes Without Boilerplate" category = "main" optional = false -python-versions = ">=3.5" - -[package.extras] -dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] -docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] -tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] -tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] - -[[package]] -name = "Babel" -version = "2.10.3" -description = "Internationalization utilities" -category = "main" -optional = false python-versions = ">=3.6" -[package.dependencies] -pytz = ">=2015.7" +[package.extras] +cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] +tests = ["attrs[tests-no-zope]", "zope.interface"] +tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] + +[[package]] +name = "babel" +version = "2.12.1" +description = "Internationalization utilities" +category = "main" +optional = false +python-versions = ">=3.7" [[package]] name = "bandit" @@ -144,7 +142,7 @@ typecheck = ["mypy"] [[package]] name = "beautifulsoup4" -version = "4.11.1" +version = "4.12.0" description = "Screen-scraping library" category = "dev" optional = false @@ -167,7 +165,7 @@ python-versions = "*" [[package]] name = "black" -version = "22.10.0" +version = "22.12.0" description = "The uncompromising code formatter." category = "dev" optional = false @@ -275,14 +273,11 @@ python-versions = ">=3.6.1" [[package]] name = "charset-normalizer" -version = "2.1.1" +version = "3.1.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." category = "main" optional = false -python-versions = ">=3.6.0" - -[package.extras] -unicode-backport = ["unicodedata2"] +python-versions = ">=3.7.0" [[package]] name = "classify-imports" @@ -355,11 +350,11 @@ PyYAML = ">=3.11" [[package]] name = "colorama" -version = "0.4.5" +version = "0.4.6" description = "Cross-platform colored terminal text." 
category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" [[package]] name = "configparser" @@ -375,7 +370,7 @@ testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-chec [[package]] name = "connexion" -version = "2.14.1" +version = "2.14.2" description = "Connexion - API first applications with OpenAPI/Swagger and Flask" category = "main" optional = false @@ -383,7 +378,7 @@ python-versions = ">=3.6" [package.dependencies] clickclick = ">=1.2,<21" -flask = ">=1.0.4,<3" +flask = ">=1.0.4,<2.3" inflection = ">=0.3.1,<0.6" itsdangerous = ">=0.24" jsonschema = ">=2.5.1,<5" @@ -391,14 +386,14 @@ packaging = ">=20" PyYAML = ">=5.1,<7" requests = ">=2.9.1,<3" swagger-ui-bundle = {version = ">=0.0.2,<0.1", optional = true, markers = "extra == \"swagger-ui\""} -werkzeug = ">=1.0,<3" +werkzeug = ">=1.0,<2.3" [package.extras] aiohttp = ["MarkupSafe (>=0.23)", "aiohttp (>=2.3.10,<4)", "aiohttp-jinja2 (>=0.14.0,<2)"] docs = ["sphinx-autoapi (==1.8.1)"] -flask = ["flask (>=1.0.4,<3)", "itsdangerous (>=0.24)"] +flask = ["flask (>=1.0.4,<2.3)", "itsdangerous (>=0.24)"] swagger-ui = ["swagger-ui-bundle (>=0.0.2,<0.1)"] -tests = ["MarkupSafe (>=0.23)", "aiohttp (>=2.3.10,<4)", "aiohttp-jinja2 (>=0.14.0,<2)", "aiohttp-remotes", "decorator (>=5,<6)", "flask (>=1.0.4,<3)", "itsdangerous (>=0.24)", "pytest (>=6,<7)", "pytest-aiohttp", "pytest-cov (>=2,<3)", "swagger-ui-bundle (>=0.0.2,<0.1)", "testfixtures (>=6,<7)"] +tests = ["MarkupSafe (>=0.23)", "aiohttp (>=2.3.10,<4)", "aiohttp-jinja2 (>=0.14.0,<2)", "aiohttp-remotes", "decorator (>=5,<6)", "flask (>=1.0.4,<2.3)", "itsdangerous (>=0.24)", "pytest (>=6,<7)", "pytest-aiohttp", "pytest-cov (>=2,<3)", "swagger-ui-bundle (>=0.0.2,<0.1)", "testfixtures (>=6,<7)"] [[package]] name = "coverage" @@ -445,20 +440,20 @@ python-versions = ">=3.6,<4.0" [[package]] name = "dateparser" -version = "1.1.2" +version = "1.1.8" description = "Date parsing library designed to parse dates from HTML pages" category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=3.7" [package.dependencies] python-dateutil = "*" pytz = "*" -regex = "<2019.02.19 || >2019.02.19,<2021.8.27 || >2021.8.27,<2022.3.15" +regex = "<2019.02.19 || >2019.02.19,<2021.8.27 || >2021.8.27" tzlocal = "*" [package.extras] -calendars = ["convertdate", "convertdate", "hijri-converter"] +calendars = ["convertdate", "hijri-converter"] fasttext = ["fasttext"] langdetect = ["langdetect"] @@ -507,7 +502,7 @@ pipenv = ["pipenv"] [[package]] name = "exceptiongroup" -version = "1.0.4" +version = "1.1.1" description = "Backport of PEP 654 (exception groups)" category = "main" optional = false @@ -518,15 +513,15 @@ test = ["pytest (>=6)"] [[package]] name = "filelock" -version = "3.8.0" +version = "3.10.7" description = "A platform independent file lock." 
category = "dev" optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo (>=2022.6.21)", "sphinx (>=5.1.1)", "sphinx-autodoc-typehints (>=1.19.1)"] -testing = ["covdefaults (>=2.2)", "coverage (>=6.4.2)", "pytest (>=7.1.2)", "pytest-cov (>=3)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2022.12.7)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.2.2)", "diff-cover (>=7.5)", "pytest (>=7.2.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] [[package]] name = "flake8" @@ -557,7 +552,7 @@ pycodestyle = "*" [[package]] name = "flake8-bugbear" -version = "22.10.25" +version = "22.12.6" description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." category = "dev" optional = false @@ -572,11 +567,11 @@ dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "tox"] [[package]] name = "flake8-docstrings" -version = "1.6.0" +version = "1.7.0" description = "Extension for flake8 which uses pydocstyle to check docstrings" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.7" [package.dependencies] flake8 = ">=3" @@ -607,7 +602,7 @@ pygments = "*" restructuredtext-lint = "*" [[package]] -name = "Flask" +name = "flask" version = "2.2.2" description = "A simple framework for building complex web applications." category = "main" @@ -626,8 +621,8 @@ async = ["asgiref (>=3.2)"] dotenv = ["python-dotenv"] [[package]] -name = "Flask-Admin" -version = "1.6.0" +name = "flask-admin" +version = "1.6.1" description = "Simple and extensible admin interface framework for Flask" category = "main" optional = false @@ -642,7 +637,7 @@ aws = ["boto"] azure = ["azure-storage-blob"] [[package]] -name = "Flask-Bcrypt" +name = "flask-bcrypt" version = "1.0.1" description = "Brcrypt hashing for Flask." category = "main" @@ -685,7 +680,7 @@ reference = "main" resolved_reference = "c18306300f4312b8d36e0197fd6b62399180d0b1" [[package]] -name = "Flask-Cors" +name = "flask-cors" version = "3.0.10" description = "A Flask extension adding a decorator for CORS support" category = "main" @@ -713,7 +708,7 @@ Werkzeug = ">=0.14" asymmetric-crypto = ["cryptography (>=3.3.1)"] [[package]] -name = "Flask-Mail" +name = "flask-mail" version = "0.9.1" description = "Flask extension for sending email" category = "main" @@ -745,20 +740,20 @@ sqlalchemy = ["flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmal tests = ["flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmallow-sqlalchemy (>=0.13.0,<0.19.0)", "mock", "pytest"] [[package]] -name = "Flask-Migrate" -version = "3.1.0" +name = "flask-migrate" +version = "4.0.4" description = "SQLAlchemy database migrations for Flask applications using Alembic." category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -alembic = ">=0.7" +alembic = ">=1.9.0" Flask = ">=0.9" Flask-SQLAlchemy = ">=1.0" [[package]] -name = "Flask-RESTful" +name = "flask-restful" version = "0.3.9" description = "Simple framework for creating REST APIs" category = "main" @@ -788,7 +783,7 @@ pycryptodome = "*" [[package]] name = "flask-sqlalchemy" -version = "3.0.2" +version = "3.0.3" description = "Add SQLAlchemy support to your Flask application." 
category = "main" optional = false @@ -800,7 +795,7 @@ SQLAlchemy = ">=1.4.18" [[package]] name = "furo" -version = "2022.9.29" +version = "2023.3.27" description = "A clean customisable Sphinx documentation theme." category = "dev" optional = false @@ -809,16 +804,16 @@ python-versions = ">=3.7" [package.dependencies] beautifulsoup4 = "*" pygments = ">=2.7" -sphinx = ">=4.0,<6.0" +sphinx = ">=5.0,<7.0" sphinx-basic-ng = "*" [[package]] name = "gitdb" -version = "4.0.9" +version = "4.0.10" description = "Git Object Database" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] smmap = ">=3.0.1,<6" @@ -836,7 +831,7 @@ gitdb = ">=4.0.1,<5" [[package]] name = "greenlet" -version = "2.0.1" +version = "2.0.2" description = "Lightweight in-process concurrent programming" category = "main" optional = false @@ -844,7 +839,7 @@ python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" [package.extras] docs = ["Sphinx", "docutils (<0.18)"] -test = ["faulthandler", "objgraph", "psutil"] +test = ["objgraph", "psutil"] [[package]] name = "gunicorn" @@ -865,7 +860,7 @@ tornado = ["tornado (>=0.2)"] [[package]] name = "identify" -version = "2.5.6" +version = "2.5.22" description = "File identification library for Python" category = "dev" optional = false @@ -892,7 +887,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "importlib-metadata" -version = "4.13.0" +version = "6.1.0" description = "Read metadata from Python packages" category = "main" optional = false @@ -902,7 +897,7 @@ python-versions = ">=3.7" zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] @@ -916,23 +911,23 @@ python-versions = ">=3.5" [[package]] name = "iniconfig" -version = "1.1.1" -description = "iniconfig: brain-dead simple config-ini parsing" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" category = "main" optional = false -python-versions = "*" +python-versions = ">=3.7" [[package]] name = "isort" -version = "5.11.4" +version = "5.12.0" description = "A Python utility / library to sort Python imports." category = "main" optional = false -python-versions = ">=3.7.0" +python-versions = ">=3.8.0" [package.extras] -colors = ["colorama (>=0.4.3,<0.5.0)"] -pipfile-deprecated-finder = ["pipreqs", "requirementslib"] +colors = ["colorama (>=0.4.3)"] +pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] plugins = ["setuptools"] requirements-deprecated-finder = ["pip-api", "pipreqs"] @@ -945,7 +940,7 @@ optional = false python-versions = ">=3.7" [[package]] -name = "Jinja2" +name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." 
category = "main" @@ -960,7 +955,7 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jsonschema" -version = "4.16.0" +version = "4.17.3" description = "An implementation of JSON Schema validation for Python" category = "main" optional = false @@ -1004,11 +999,11 @@ zookeeper = ["kazoo (>=1.3.1)"] [[package]] name = "lazy-object-proxy" -version = "1.7.1" +version = "1.9.0" description = "A fast and thorough lazy object proxy." category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "livereload" @@ -1024,7 +1019,7 @@ tornado = {version = "*", markers = "python_version > \"2.7\""} [[package]] name = "lxml" -version = "4.9.1" +version = "4.9.2" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." category = "main" optional = false @@ -1037,8 +1032,8 @@ htmlsoup = ["BeautifulSoup4"] source = ["Cython (>=0.29.7)"] [[package]] -name = "Mako" -version = "1.2.3" +name = "mako" +version = "1.2.4" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." category = "main" optional = false @@ -1053,8 +1048,8 @@ lingua = ["lingua"] testing = ["pytest"] [[package]] -name = "MarkupSafe" -version = "2.1.1" +name = "markupsafe" +version = "2.1.2" description = "Safely add untrusted strings to HTML/XML markup." category = "main" optional = false @@ -1062,7 +1057,7 @@ python-versions = ">=3.7" [[package]] name = "marshmallow" -version = "3.18.0" +version = "3.19.0" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." category = "main" optional = false @@ -1072,9 +1067,9 @@ python-versions = ">=3.7" packaging = ">=17.0" [package.extras] -dev = ["flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "mypy (==0.971)", "pre-commit (>=2.4,<3.0)", "pytest", "pytz", "simplejson", "tox"] -docs = ["alabaster (==0.7.12)", "autodocsumm (==0.2.9)", "sphinx (==5.1.1)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"] -lint = ["flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "mypy (==0.971)", "pre-commit (>=2.4,<3.0)"] +dev = ["flake8 (==5.0.4)", "flake8-bugbear (==22.10.25)", "mypy (==0.990)", "pre-commit (>=2.4,<3.0)", "pytest", "pytz", "simplejson", "tox"] +docs = ["alabaster (==0.7.12)", "autodocsumm (==0.2.9)", "sphinx (==5.3.0)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"] +lint = ["flake8 (==5.0.4)", "flake8-bugbear (==22.10.25)", "mypy (==0.990)", "pre-commit (>=2.4,<3.0)"] tests = ["pytest", "pytz", "simplejson"] [[package]] @@ -1088,25 +1083,6 @@ python-versions = "*" [package.dependencies] marshmallow = ">=2.0.0" -[[package]] -name = "marshmallow-sqlalchemy" -version = "0.28.1" -description = "SQLAlchemy integration with the marshmallow (de)serialization library" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -marshmallow = ">=3.0.0" -packaging = ">=21.3" -SQLAlchemy = ">=1.3.0" - -[package.extras] -dev = ["flake8 (==4.0.1)", "flake8-bugbear (==22.7.1)", "pre-commit (>=2.0,<3.0)", "pytest", "pytest-lazy-fixture (>=0.6.2)", "tox"] -docs = ["alabaster (==0.7.12)", "sphinx (==4.4.0)", "sphinx-issues (==3.0.1)"] -lint = ["flake8 (==4.0.1)", "flake8-bugbear (==22.7.1)", "pre-commit (>=2.0,<3.0)"] -tests = ["pytest", "pytest-lazy-fixture (>=0.6.2)"] - [[package]] name = "mccabe" version = "0.6.1" @@ -1117,29 +1093,30 @@ python-versions = "*" [[package]] name = "mypy" -version = "0.982" +version = "1.1.1" description = 
"Optional static typing for Python" -category = "main" +category = "dev" optional = false python-versions = ">=3.7" [package.dependencies] -mypy-extensions = ">=0.4.3" +mypy-extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} typing-extensions = ">=3.10" [package.extras] dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] python2 = ["typed-ast (>=1.4.0,<2)"] reports = ["lxml"] [[package]] name = "mypy-extensions" -version = "0.4.3" -description = "Experimental type system extensions for programs checked with the mypy typechecker." -category = "main" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.5" [[package]] name = "mysql-connector-python" @@ -1181,7 +1158,7 @@ pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" [[package]] name = "pathspec" -version = "0.10.1" +version = "0.11.1" description = "Utility library for gitignore style pattern matching of file paths." category = "dev" optional = false @@ -1189,7 +1166,7 @@ python-versions = ">=3.7" [[package]] name = "pbr" -version = "5.10.0" +version = "5.11.1" description = "Python Build Reasonableness" category = "dev" optional = false @@ -1208,15 +1185,15 @@ flake8 = ">=3.9.1" [[package]] name = "platformdirs" -version = "2.5.2" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +version = "3.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "main" optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx (>=4)", "sphinx-autodoc-typehints (>=1.12)"] -test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.2.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] [[package]] name = "pluggy" @@ -1232,7 +1209,7 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pre-commit" -version = "2.20.0" +version = "2.21.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." category = "dev" optional = false @@ -1243,12 +1220,11 @@ cfgv = ">=2.0.0" identify = ">=1.0.0" nodeenv = ">=0.11.1" pyyaml = ">=5.1" -toml = "*" -virtualenv = ">=20.0.8" +virtualenv = ">=20.10.0" [[package]] name = "pre-commit-hooks" -version = "4.3.0" +version = "4.4.0" description = "Some out-of-the-box hooks for pre-commit." 
category = "dev" optional = false @@ -1260,11 +1236,11 @@ tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} [[package]] name = "prompt-toolkit" -version = "3.0.31" +version = "3.0.38" description = "Library for building powerful interactive command lines in Python" category = "main" optional = false -python-versions = ">=3.6.2" +python-versions = ">=3.7.0" [package.dependencies] wcwidth = "*" @@ -1279,7 +1255,7 @@ python-versions = ">=3.7" [[package]] name = "psycopg2" -version = "2.9.4" +version = "2.9.5" description = "psycopg2 - Python-PostgreSQL Database Adapter" category = "main" optional = false @@ -1311,17 +1287,17 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "pydocstyle" -version = "6.1.1" +version = "6.3.0" description = "Python docstring style checker" category = "dev" optional = false python-versions = ">=3.6" [package.dependencies] -snowballstemmer = "*" +snowballstemmer = ">=2.2.0" [package.extras] -toml = ["toml"] +toml = ["tomli (>=1.2.3)"] [[package]] name = "pyflakes" @@ -1332,8 +1308,8 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] -name = "Pygments" -version = "2.13.0" +name = "pygments" +version = "2.14.0" description = "Pygments is a syntax highlighting package written in Python." category = "main" optional = false @@ -1358,14 +1334,14 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "pylint" -version = "2.15.10" +version = "2.17.1" description = "python code static checker" category = "main" optional = false python-versions = ">=3.7.2" [package.dependencies] -astroid = ">=2.12.13,<=2.14.0-dev0" +astroid = ">=2.15.0,<=2.17.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = [ {version = ">=0.2", markers = "python_version < \"3.11\""}, @@ -1395,7 +1371,7 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pyrsistent" -version = "0.18.1" +version = "0.19.3" description = "Persistent/Functional/Immutable data structures" category = "main" optional = false @@ -1403,7 +1379,7 @@ python-versions = ">=3.7" [[package]] name = "pytest" -version = "7.2.0" +version = "7.2.2" description = "pytest: simple powerful testing with Python" category = "main" optional = false @@ -1501,7 +1477,7 @@ tzdata = {version = "*", markers = "python_version >= \"3.6\""} [[package]] name = "pyupgrade" -version = "3.1.0" +version = "3.3.1" description = "A tool to automatically upgrade syntax for newer versions." category = "dev" optional = false @@ -1511,7 +1487,7 @@ python-versions = ">=3.7" tokenize-rt = ">=3.2.0" [[package]] -name = "PyYAML" +name = "pyyaml" version = "6.0" description = "YAML parser and emitter for Python" category = "main" @@ -1520,11 +1496,11 @@ python-versions = ">=3.6" [[package]] name = "regex" -version = "2022.3.2" +version = "2023.3.23" description = "Alternative regular expression module, to replace re." category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" [[package]] name = "reorder-python-imports" @@ -1539,7 +1515,7 @@ classify-imports = ">=4.1" [[package]] name = "requests" -version = "2.28.1" +version = "2.28.2" description = "Python HTTP for Humans." 
category = "main" optional = false @@ -1547,7 +1523,7 @@ python-versions = ">=3.7, <4" [package.dependencies] certifi = ">=2017.4.17" -charset-normalizer = ">=2,<3" +charset-normalizer = ">=2,<4" idna = ">=2.5,<4" urllib3 = ">=1.21.1,<1.27" @@ -1579,7 +1555,7 @@ python-versions = "*" docutils = ">=0.11,<1.0" [[package]] -name = "ruamel.yaml" +name = "ruamel-yaml" version = "0.17.21" description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" category = "main" @@ -1623,7 +1599,7 @@ gitlab = ["python-gitlab (>=1.3.0)"] [[package]] name = "sentry-sdk" -version = "1.16.0" +version = "1.18.0" description = "Python client for Sentry (https://sentry.io)" category = "main" optional = false @@ -1660,20 +1636,20 @@ tornado = ["tornado (>=5)"] [[package]] name = "setuptools" -version = "65.5.1" +version = "65.7.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "main" optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "simplejson" -version = "3.17.6" +version = "3.18.4" description = "Simple, fast, extensible JSON encoder/decoder for Python" category = "main" optional = false @@ -1705,14 +1681,14 @@ python-versions = "*" [[package]] name = "soupsieve" -version = "2.3.2.post1" +version = "2.4" description = "A modern CSS selector implementation for Beautiful Soup." 
category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] -name = "Sphinx" +name = "sphinx" version = "5.3.0" description = "Python documentation generator" category = "main" @@ -1745,7 +1721,7 @@ test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"] [[package]] name = "sphinx-autoapi" -version = "2.0.0" +version = "2.1.0" description = "Sphinx API documentation generator" category = "main" optional = false @@ -1755,7 +1731,7 @@ python-versions = ">=3.7" astroid = ">=2.7" Jinja2 = "*" PyYAML = "*" -sphinx = ">=4.0" +sphinx = ">=5.2.0" unidecode = "*" [package.extras] @@ -1795,7 +1771,7 @@ docs = ["furo", "ipython", "myst-parser", "sphinx-copybutton", "sphinx-inline-ta [[package]] name = "sphinx-click" -version = "4.3.0" +version = "4.4.0" description = "Sphinx extension that automatically documents click applications" category = "dev" optional = false @@ -1808,11 +1784,11 @@ sphinx = ">=2.0" [[package]] name = "sphinxcontrib-applehelp" -version = "1.0.2" -description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" +version = "1.0.4" +description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] @@ -1832,11 +1808,11 @@ test = ["pytest"] [[package]] name = "sphinxcontrib-htmlhelp" -version = "2.0.0" +version = "2.0.1" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] @@ -1898,59 +1874,43 @@ reference = "main" resolved_reference = "62454c99c3a711c38f4249a3b5e7215d42037d72" [[package]] -name = "SQLAlchemy" -version = "1.4.42" +name = "sqlalchemy" +version = "2.0.7" description = "Database Abstraction Library" category = "main" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +python-versions = ">=3.7" [package.dependencies] -greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} +greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} +typing-extensions = ">=4.2.0" [package.extras] aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] mssql = ["pyodbc"] mssql-pymssql = ["pymssql"] mssql-pyodbc = ["pyodbc"] -mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] -mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] mysql-connector = 
["mysql-connector-python"] -oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"] +oracle = ["cx-oracle (>=7)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] postgresql = ["psycopg2 (>=2.7)"] postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] postgresql-psycopg2binary = ["psycopg2-binary"] postgresql-psycopg2cffi = ["psycopg2cffi"] -pymysql = ["pymysql", "pymysql (<1)"] -sqlcipher = ["sqlcipher3_binary"] - -[[package]] -name = "sqlalchemy-stubs" -version = "0.4" -description = "SQLAlchemy stubs and mypy plugin" -category = "main" -optional = false -python-versions = "*" -develop = false - -[package.dependencies] -mypy = ">=0.790" -typing-extensions = ">=3.7.4" - -[package.source] -type = "git" -url = "https://github.com/burnettk/sqlalchemy-stubs.git" -reference = "scoped-session-delete" -resolved_reference = "d1176931684ce5b327539cc9567d4a1cd8ef1efd" +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3-binary"] [[package]] name = "stevedore" -version = "4.0.1" +version = "5.0.0" description = "Manage dynamic plugins for Python applications" category = "dev" optional = false @@ -1972,11 +1932,11 @@ Jinja2 = ">=2.0" [[package]] name = "tokenize-rt" -version = "4.2.1" +version = "5.0.0" description = "A wrapper around the stdlib `tokenize` which roundtrips." category = "dev" optional = false -python-versions = ">=3.6.1" +python-versions = ">=3.7" [[package]] name = "toml" @@ -1996,11 +1956,11 @@ python-versions = ">=3.7" [[package]] name = "tomlkit" -version = "0.11.6" +version = "0.11.7" description = "Style preserving TOML library" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "tornado" @@ -2032,14 +1992,14 @@ python-versions = "*" [[package]] name = "types-dateparser" -version = "1.1.4.1" +version = "1.1.4.9" description = "Typing stubs for dateparser" category = "main" optional = false python-versions = "*" [[package]] -name = "types-Flask" +name = "types-flask" version = "1.1.6" description = "Typing stubs for Flask" category = "main" @@ -2052,7 +2012,7 @@ types-Jinja2 = "*" types-Werkzeug = "*" [[package]] -name = "types-Jinja2" +name = "types-jinja2" version = "2.11.9" description = "Typing stubs for Jinja2" category = "main" @@ -2063,7 +2023,7 @@ python-versions = "*" types-MarkupSafe = "*" [[package]] -name = "types-MarkupSafe" +name = "types-markupsafe" version = "1.1.10" description = "Typing stubs for MarkupSafe" category = "main" @@ -2072,15 +2032,15 @@ python-versions = "*" [[package]] name = "types-pytz" -version = "2022.5.0.0" +version = "2022.7.1.2" description = "Typing stubs for pytz" category = "main" optional = false python-versions = "*" [[package]] -name = "types-PyYAML" -version = "6.0.12" +name = "types-pyyaml" +version = "6.0.12.9" description = "Typing stubs for PyYAML" category = "main" optional = false @@ -2088,7 +2048,7 @@ python-versions = "*" [[package]] name = "types-requests" -version = "2.28.11.2" +version = "2.28.11.17" description = "Typing stubs for requests" category = "main" optional = false @@ -2099,14 +2059,14 @@ types-urllib3 = "<1.27" [[package]] name = "types-urllib3" -version = "1.26.25.1" +version = "1.26.25.10" description = "Typing stubs for urllib3" category = "main" optional = false python-versions = "*" [[package]] -name = "types-Werkzeug" +name = "types-werkzeug" version = "1.0.9" description = "Typing stubs for Werkzeug" category = "main" @@ 
-2115,7 +2075,7 @@ python-versions = "*" [[package]] name = "typing-extensions" -version = "4.4.0" +version = "4.5.0" description = "Backported and Experimental Type Hints for Python 3.7+" category = "main" optional = false @@ -2123,7 +2083,7 @@ python-versions = ">=3.7" [[package]] name = "tzdata" -version = "2022.5" +version = "2023.2" description = "Provider of IANA time zone data" category = "main" optional = false @@ -2131,22 +2091,21 @@ python-versions = ">=2" [[package]] name = "tzlocal" -version = "4.2" +version = "4.3" description = "tzinfo object for the local timezone" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] pytz-deprecation-shim = "*" tzdata = {version = "*", markers = "platform_system == \"Windows\""} [package.extras] -devenv = ["black", "pyroma", "pytest-cov", "zest.releaser"] -test = ["pytest (>=4.3)", "pytest-mock (>=3.3)"] +devenv = ["black", "check-manifest", "flake8", "pyroma", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"] [[package]] -name = "Unidecode" +name = "unidecode" version = "1.3.6" description = "ASCII transliterations of Unicode text" category = "main" @@ -2155,11 +2114,11 @@ python-versions = ">=3.5" [[package]] name = "urllib3" -version = "1.26.12" +version = "1.26.15" description = "HTTP library with thread-safe connection pooling, file post, and more." category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] @@ -2176,24 +2135,24 @@ python-versions = ">=3.6" [[package]] name = "virtualenv" -version = "20.16.5" +version = "20.21.0" description = "Virtual Python Environment builder" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] -distlib = ">=0.3.5,<1" +distlib = ">=0.3.6,<1" filelock = ">=3.4.1,<4" -platformdirs = ">=2.4,<3" +platformdirs = ">=2.4,<4" [package.extras] -docs = ["proselint (>=0.13)", "sphinx (>=5.1.1)", "sphinx-argparse (>=0.3.1)", "sphinx-rtd-theme (>=1)", "towncrier (>=21.9)"] -testing = ["coverage (>=6.2)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=21.3)", "pytest (>=7.0.1)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.6.1)", "pytest-randomly (>=3.10.3)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] +test = ["covdefaults (>=2.2.2)", "coverage (>=7.1)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23)", "pytest (>=7.2.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)"] [[package]] name = "wcwidth" -version = "0.2.5" +version = "0.2.6" description = "Measures the displayed width of unicode strings in a terminal" category = "main" optional = false @@ -2215,14 +2174,14 @@ watchdog = ["watchdog"] [[package]] name = "wrapt" -version = "1.14.1" +version = "1.15.0" description = "Module for decorators, wrappers and monkey patching." 
category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [[package]] -name = "WTForms" +name = "wtforms" version = "3.0.1" description = "Form validation and rendering for Python web development." category = "main" @@ -2237,7 +2196,7 @@ email = ["email-validator"] [[package]] name = "xdoctest" -version = "1.1.0" +version = "1.1.1" description = "A rewrite of the builtin doctest module" category = "dev" optional = false @@ -2249,41 +2208,43 @@ Pygments = {version = "*", optional = true, markers = "python_version >= \"3.5.0 six = "*" [package.extras] -all = ["IPython", "IPython", "Pygments", "Pygments", "attrs", "cmake", "codecov", "colorama", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert", "ninja", "pybind11", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest-cov", "pytest-cov", "pytest-cov", "pytest-cov", "scikit-build", "six", "typing"] -all-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "cmake (==3.21.2)", "codecov (==2.0.15)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "ninja (==1.10.2)", "pybind11 (==2.7.1)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==2.8.1)", "pytest-cov (==2.8.1)", "pytest-cov (==2.9.0)", "pytest-cov (==3.0.0)", "scikit-build (==0.11.1)", "six (==1.11.0)", "typing (==3.7.4)"] +all = ["IPython", "IPython", "Pygments", "Pygments", "attrs", "codecov", "colorama", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert", "pyflakes", "pytest", "pytest", "pytest", "pytest-cov", "six", "tomli", "typing"] +all-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "codecov (==2.0.15)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "pyflakes (==2.2.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==3.0.0)", "six (==1.11.0)", "tomli (==0.2.0)", "typing (==3.7.4)"] colors = ["Pygments", "Pygments", "colorama"] jupyter = ["IPython", "IPython", "attrs", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert"] optional = ["IPython", "IPython", "Pygments", "Pygments", "attrs", "colorama", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert", "pyflakes", "tomli"] optional-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", 
"attrs (==19.2.0)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "pyflakes (==2.2.0)", "tomli (==0.2.0)"] runtime-strict = ["six (==1.11.0)"] -tests = ["cmake", "codecov", "ninja", "pybind11", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest-cov", "pytest-cov", "pytest-cov", "pytest-cov", "scikit-build", "typing"] -tests-strict = ["cmake (==3.21.2)", "codecov (==2.0.15)", "ninja (==1.10.2)", "pybind11 (==2.7.1)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==2.8.1)", "pytest-cov (==2.8.1)", "pytest-cov (==2.9.0)", "pytest-cov (==3.0.0)", "scikit-build (==0.11.1)", "typing (==3.7.4)"] +tests = ["codecov", "pytest", "pytest", "pytest", "pytest-cov", "typing"] +tests-binary = ["cmake", "cmake", "ninja", "ninja", "pybind11", "pybind11", "scikit-build", "scikit-build"] +tests-binary-strict = ["cmake (==3.21.2)", "cmake (==3.25.0)", "ninja (==1.10.2)", "ninja (==1.11.1)", "pybind11 (==2.10.3)", "pybind11 (==2.7.1)", "scikit-build (==0.11.1)", "scikit-build (==0.16.1)"] +tests-strict = ["codecov (==2.0.15)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==3.0.0)", "typing (==3.7.4)"] [[package]] name = "zipp" -version = "3.10.0" +version = "3.15.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "main" optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] -testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [metadata] lock-version = "1.1" python-versions = ">=3.9,<3.12" -content-hash = "723a73cc0d56d4bfe4a791356cf36675a6bcf289a4959abc77c5512738b88d51" +content-hash = "9dfdc4d01b78fbbed6cbb827806df1202840c8b45b957b724a58216183090d94" [metadata.files] alabaster = [ - {file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"}, - {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"}, + {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"}, + {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, ] alembic = [ - {file = "alembic-1.8.1-py3-none-any.whl", hash = "sha256:0a024d7f2de88d738d7395ff866997314c837be6104e90c5724350313dee4da4"}, - {file = "alembic-1.8.1.tar.gz", hash = "sha256:cd0b5e45b14b706426b833f06369b9a6d5ee03f826ec3238723ce8caaf6e5ffa"}, + {file = "alembic-1.10.2-py3-none-any.whl", 
hash = "sha256:8b48368f6533c064b39c024e1daba15ae7f947eac84185c28c06bbe1301a5497"}, + {file = "alembic-1.10.2.tar.gz", hash = "sha256:457eafbdc0769d855c2c92cbafe6b7f319f916c80cf4ed02b8f394f38b51b89d"}, ] amqp = [ {file = "amqp-5.1.1-py3-none-any.whl", hash = "sha256:6f0956d2c23d8fa6e7691934d8c3930eadb44972cbbd1a7ae3a520f735d43359"}, @@ -2294,20 +2255,20 @@ aniso8601 = [ {file = "aniso8601-9.0.1.tar.gz", hash = "sha256:72e3117667eedf66951bb2d93f4296a56b94b078a8a95905a052611fb3f1b973"}, ] apscheduler = [ - {file = "APScheduler-3.9.1.post1-py2.py3-none-any.whl", hash = "sha256:c8c618241dbb2785ed5a687504b14cb1851d6f7b5a4edf3a51e39cc6a069967a"}, - {file = "APScheduler-3.9.1.post1.tar.gz", hash = "sha256:b2bea0309569da53a7261bfa0ce19c67ddbfe151bda776a6a907579fdbd3eb2a"}, + {file = "APScheduler-3.10.1-py3-none-any.whl", hash = "sha256:e813ad5ada7aff36fb08cdda746b520531eaac7757832abc204868ba78e0c8f6"}, + {file = "APScheduler-3.10.1.tar.gz", hash = "sha256:0293937d8f6051a0f493359440c1a1b93e882c57daf0197afeff0e727777b96e"}, ] astroid = [ - {file = "astroid-2.13.3-py3-none-any.whl", hash = "sha256:14c1603c41cc61aae731cad1884a073c4645e26f126d13ac8346113c95577f3b"}, - {file = "astroid-2.13.3.tar.gz", hash = "sha256:6afc22718a48a689ca24a97981ad377ba7fb78c133f40335dfd16772f29bcfb1"}, + {file = "astroid-2.15.1-py3-none-any.whl", hash = "sha256:89860bda98fe2bbd1f5d262229be7629d778ce280de68d95d4a73d1f592ad268"}, + {file = "astroid-2.15.1.tar.gz", hash = "sha256:af4e0aff46e2868218502789898269ed95b663fba49e65d91c1e09c966266c34"}, ] attrs = [ - {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, - {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, + {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, + {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, ] -Babel = [ - {file = "Babel-2.10.3-py3-none-any.whl", hash = "sha256:ff56f4892c1c4bf0d814575ea23471c230d544203c7748e8c68f0089478d48eb"}, - {file = "Babel-2.10.3.tar.gz", hash = "sha256:7614553711ee97490f732126dc077f8d0ae084ebc6a96e23db1482afabdb2c51"}, +babel = [ + {file = "Babel-2.12.1-py3-none-any.whl", hash = "sha256:b4246fb7677d3b98f501a39d43396d3cafdc8eadb045f4a31be01863f655c610"}, + {file = "Babel-2.12.1.tar.gz", hash = "sha256:cc2d99999cd01d44420ae725a21c9e3711b3aadc7976d6147f622d8581963455"}, ] bandit = [ {file = "bandit-1.7.2-py3-none-any.whl", hash = "sha256:e20402cadfd126d85b68ed4c8862959663c8c372dbbb1fca8f8e2c9f55a067ec"}, @@ -2337,35 +2298,26 @@ bcrypt = [ {file = "bcrypt-4.0.1.tar.gz", hash = "sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd"}, ] beautifulsoup4 = [ - {file = "beautifulsoup4-4.11.1-py3-none-any.whl", hash = "sha256:58d5c3d29f5a36ffeb94f02f0d786cd53014cf9b3b3951d42e0080d8a9498d30"}, - {file = "beautifulsoup4-4.11.1.tar.gz", hash = "sha256:ad9aa55b65ef2808eb405f46cf74df7fcb7044d5cbc26487f96eb2ef2e436693"}, + {file = "beautifulsoup4-4.12.0-py3-none-any.whl", hash = "sha256:2130a5ad7f513200fae61a17abb5e338ca980fa28c439c0571014bc0217e9591"}, + {file = "beautifulsoup4-4.12.0.tar.gz", hash = "sha256:c5fceeaec29d09c84970e47c65f2f0efe57872f7cff494c9691a26ec0ff13234"}, ] billiard = [ {file = "billiard-3.6.4.0-py3-none-any.whl", hash = "sha256:87103ea78fa6ab4d5c751c4909bcff74617d985de7fa8b672cf8618afd5a875b"}, {file = "billiard-3.6.4.0.tar.gz", hash = 
"sha256:299de5a8da28a783d51b197d496bef4f1595dd023a93a4f59dde1886ae905547"}, ] black = [ - {file = "black-22.10.0-1fixedarch-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa"}, - {file = "black-22.10.0-1fixedarch-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef"}, - {file = "black-22.10.0-1fixedarch-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6"}, - {file = "black-22.10.0-1fixedarch-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d"}, - {file = "black-22.10.0-1fixedarch-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4"}, - {file = "black-22.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb"}, - {file = "black-22.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7"}, - {file = "black-22.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66"}, - {file = "black-22.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae"}, - {file = "black-22.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b"}, - {file = "black-22.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d"}, - {file = "black-22.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650"}, - {file = "black-22.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d"}, - {file = "black-22.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff"}, - {file = "black-22.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87"}, - {file = "black-22.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395"}, - {file = "black-22.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0"}, - {file = "black-22.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383"}, - {file = "black-22.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de"}, - {file = "black-22.10.0-py3-none-any.whl", hash = "sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458"}, - {file = "black-22.10.0.tar.gz", hash = "sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1"}, + {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"}, + {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"}, + {file = 
"black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"}, + {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"}, + {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"}, + {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"}, + {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"}, + {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"}, + {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"}, + {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"}, + {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"}, + {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"}, ] blinker = [ {file = "blinker-1.5-py2.py3-none-any.whl", hash = "sha256:1eb563df6fdbc39eeddc177d953203f99f097e9bf0e2b8f9f3cf18b6ca425e36"}, @@ -2450,8 +2402,81 @@ cfgv = [ {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, ] charset-normalizer = [ - {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, - {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, + {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, + 
{file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, + {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, ] classify-imports = [ {file = "classify_imports-4.2.0-py2.py3-none-any.whl", hash = "sha256:dbbc264b70a470ed8c6c95976a11dfb8b7f63df44ed1af87328bbed2663f5161"}, @@ -2478,16 +2503,16 @@ clickclick = [ {file = "clickclick-20.10.2.tar.gz", hash = "sha256:4efb13e62353e34c5eef7ed6582c4920b418d7dedc86d819e22ee089ba01802c"}, ] colorama = [ - {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, - {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] configparser = [ {file = "configparser-5.3.0-py3-none-any.whl", hash = "sha256:b065779fd93c6bf4cee42202fa4351b4bb842e96a3fb469440e484517a49b9fa"}, {file = "configparser-5.3.0.tar.gz", hash = "sha256:8be267824b541c09b08db124917f48ab525a6c3e837011f3130781a224c57090"}, ] connexion = [ - {file = "connexion-2.14.1-py2.py3-none-any.whl", hash = "sha256:f343717241b4c4802a694c38fee66fb1693c897fe4ea5a957fa9b3b07caf6394"}, - {file = "connexion-2.14.1.tar.gz", hash = "sha256:99aa5781e70a7b94f8ffae8cf89f309d49cdb811bbd65a8e2f2546f3b19a01e6"}, + {file = "connexion-2.14.2-py2.py3-none-any.whl", hash = "sha256:a73b96a0e07b16979a42cde7c7e26afe8548099e352cf350f80c57185e0e0b36"}, + {file = "connexion-2.14.2.tar.gz", hash = "sha256:dbc06f52ebeebcf045c9904d570f24377e8bbd5a6521caef15a06f634cf85646"}, ] coverage = [ {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, @@ -2571,8 +2596,8 @@ darglint = [ {file = "darglint-1.8.1.tar.gz", hash = "sha256:080d5106df149b199822e7ee7deb9c012b49891538f14a11be681044f0bb20da"}, ] dateparser = [ - {file = "dateparser-1.1.2-py2.py3-none-any.whl", hash = "sha256:d31659dc806a7d88e2b510b2c74f68b525ae531f145c62a57a99bd616b7f90cf"}, - {file = "dateparser-1.1.2.tar.gz", hash = "sha256:3821bf191f95b2658c4abd91571c09821ce7a2bc179bf6cefd8b4515c3ccf9ef"}, + {file = "dateparser-1.1.8-py2.py3-none-any.whl", hash = "sha256:070b29b5bbf4b1ec2cd51c96ea040dc68a614de703910a91ad1abba18f9f379f"}, + {file = "dateparser-1.1.8.tar.gz", hash = "sha256:86b8b7517efcc558f085a142cdb7620f0921543fcabdb538c8a4c4001d8178e3"}, ] dill = [ {file = "dill-0.3.6-py3-none-any.whl", hash = "sha256:a07ffd2351b8c678dfc4a856a3005f8067aea51d6ba6c700796a4d9e280f39f0"}, @@ -2591,12 +2616,12 @@ dparse = [ {file = "dparse-0.6.2.tar.gz", hash = "sha256:d45255bda21f998bc7ddf2afd5e62505ba6134756ba2d42a84c56b0826614dfe"}, ] exceptiongroup = [ - {file = "exceptiongroup-1.0.4-py3-none-any.whl", hash = "sha256:542adf9dea4055530d6e1279602fa5cb11dab2395fa650b8674eaec35fc4a828"}, - {file = "exceptiongroup-1.0.4.tar.gz", hash = "sha256:bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec"}, + {file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"}, + {file = "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"}, ] filelock = [ - {file = 
"filelock-3.8.0-py3-none-any.whl", hash = "sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4"}, - {file = "filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"}, + {file = "filelock-3.10.7-py3-none-any.whl", hash = "sha256:bde48477b15fde2c7e5a0713cbe72721cb5a5ad32ee0b8f419907960b9d75536"}, + {file = "filelock-3.10.7.tar.gz", hash = "sha256:892be14aa8efc01673b5ed6589dbccb95f9a8596f0507e232626155495c18105"}, ] flake8 = [ {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, @@ -2606,12 +2631,12 @@ flake8-bandit = [ {file = "flake8_bandit-2.1.2.tar.gz", hash = "sha256:687fc8da2e4a239b206af2e54a90093572a60d0954f3054e23690739b0b0de3b"}, ] flake8-bugbear = [ - {file = "flake8-bugbear-22.10.25.tar.gz", hash = "sha256:89e51284eb929fbb7f23fbd428491e7427f7cdc8b45a77248daffe86a039d696"}, - {file = "flake8_bugbear-22.10.25-py3-none-any.whl", hash = "sha256:584631b608dc0d7d3f9201046d5840a45502da4732d5e8df6c7ac1694a91cb9e"}, + {file = "flake8-bugbear-22.12.6.tar.gz", hash = "sha256:4cdb2c06e229971104443ae293e75e64c6107798229202fbe4f4091427a30ac0"}, + {file = "flake8_bugbear-22.12.6-py3-none-any.whl", hash = "sha256:b69a510634f8a9c298dfda2b18a8036455e6b19ecac4fe582e4d7a0abfa50a30"}, ] flake8-docstrings = [ - {file = "flake8-docstrings-1.6.0.tar.gz", hash = "sha256:9fe7c6a306064af8e62a055c2f61e9eb1da55f84bb39caef2b84ce53708ac34b"}, - {file = "flake8_docstrings-1.6.0-py2.py3-none-any.whl", hash = "sha256:99cac583d6c7e32dd28bbfbef120a7c0d1b6dde4adb5a9fd441c4227a6534bde"}, + {file = "flake8_docstrings-1.7.0-py2.py3-none-any.whl", hash = "sha256:51f2344026da083fc084166a9353f5082b01f72901df422f74b4d953ae88ac75"}, + {file = "flake8_docstrings-1.7.0.tar.gz", hash = "sha256:4c8cc748dc16e6869728699e5d0d685da9a10b0ea718e090b1ba088e67a941af"}, ] flake8-polyfill = [ {file = "flake8-polyfill-1.0.2.tar.gz", hash = "sha256:e44b087597f6da52ec6393a709e7108b2905317d0c0b744cdca6208e670d8eda"}, @@ -2621,19 +2646,20 @@ flake8-rst-docstrings = [ {file = "flake8-rst-docstrings-0.2.7.tar.gz", hash = "sha256:2740067ab9237559dd45a3434d8c987792c7b259ca563621a3b95efe201f5382"}, {file = "flake8_rst_docstrings-0.2.7-py3-none-any.whl", hash = "sha256:5d56075dce360bcc9c6775bfe7cb431aa395de600ca7e8d40580a28d50b2a803"}, ] -Flask = [ +flask = [ {file = "Flask-2.2.2-py3-none-any.whl", hash = "sha256:b9c46cc36662a7949f34b52d8ec7bb59c0d74ba08ba6cb9ce9adc1d8676d9526"}, {file = "Flask-2.2.2.tar.gz", hash = "sha256:642c450d19c4ad482f96729bd2a8f6d32554aa1e231f4f6b4e7e5264b16cca2b"}, ] -Flask-Admin = [ - {file = "Flask-Admin-1.6.0.tar.gz", hash = "sha256:424ffc79b7b0dfff051555686ea12e86e48dffacac14beaa319fb4502ac40988"}, +flask-admin = [ + {file = "Flask-Admin-1.6.1.tar.gz", hash = "sha256:24cae2af832b6a611a01d7dc35f42d266c1d6c75a426b869d8cb241b78233369"}, + {file = "Flask_Admin-1.6.1-py3-none-any.whl", hash = "sha256:fd8190f1ec3355913a22739c46ed3623f1d82b8112cde324c60a6fc9b21c9406"}, ] -Flask-Bcrypt = [ +flask-bcrypt = [ {file = "Flask-Bcrypt-1.0.1.tar.gz", hash = "sha256:f07b66b811417ea64eb188ae6455b0b708a793d966e1a80ceec4a23bc42a4369"}, {file = "Flask_Bcrypt-1.0.1-py3-none-any.whl", hash = "sha256:062fd991dc9118d05ac0583675507b9fe4670e44416c97e0e6819d03d01f808a"}, ] flask-bpmn = [] -Flask-Cors = [ +flask-cors = [ {file = "Flask-Cors-3.0.10.tar.gz", hash = "sha256:b60839393f3b84a0f3746f6cdca56c1ad7426aa738b70d6c61375857823181de"}, {file = "Flask_Cors-3.0.10-py2.py3-none-any.whl", hash = 
"sha256:74efc975af1194fc7891ff5cd85b0f7478be4f7f59fe158102e91abb72bb4438"}, ] @@ -2641,18 +2667,18 @@ flask-jwt-extended = [ {file = "Flask-JWT-Extended-4.4.4.tar.gz", hash = "sha256:62b521d75494c290a646ae8acc77123721e4364790f1e64af0038d823961fbf0"}, {file = "Flask_JWT_Extended-4.4.4-py2.py3-none-any.whl", hash = "sha256:a85eebfa17c339a7260c4643475af444784ba6de5588adda67406f0a75599553"}, ] -Flask-Mail = [ +flask-mail = [ {file = "Flask-Mail-0.9.1.tar.gz", hash = "sha256:22e5eb9a940bf407bcf30410ecc3708f3c56cc44b29c34e1726fe85006935f41"}, ] flask-marshmallow = [ {file = "flask-marshmallow-0.14.0.tar.gz", hash = "sha256:bd01a6372cbe50e36f205cfff0fc5dab0b7b662c4c8b2c4fc06a3151b2950950"}, {file = "flask_marshmallow-0.14.0-py2.py3-none-any.whl", hash = "sha256:2adcd782b5a4a6c5ae3c96701f320d8ca6997995a52b2661093c56cc3ed24754"}, ] -Flask-Migrate = [ - {file = "Flask-Migrate-3.1.0.tar.gz", hash = "sha256:57d6060839e3a7f150eaab6fe4e726d9e3e7cffe2150fb223d73f92421c6d1d9"}, - {file = "Flask_Migrate-3.1.0-py3-none-any.whl", hash = "sha256:a6498706241aba6be7a251078de9cf166d74307bca41a4ca3e403c9d39e2f897"}, +flask-migrate = [ + {file = "Flask-Migrate-4.0.4.tar.gz", hash = "sha256:73293d40b10ac17736e715b377e7b7bde474cb8105165d77474df4c3619b10b3"}, + {file = "Flask_Migrate-4.0.4-py3-none-any.whl", hash = "sha256:77580f27ab39bc68be4906a43c56d7674b45075bc4f883b1d0b985db5164d58f"}, ] -Flask-RESTful = [ +flask-restful = [ {file = "Flask-RESTful-0.3.9.tar.gz", hash = "sha256:ccec650b835d48192138c85329ae03735e6ced58e9b2d9c2146d6c84c06fa53e"}, {file = "Flask_RESTful-0.3.9-py2.py3-none-any.whl", hash = "sha256:4970c49b6488e46c520b325f54833374dc2b98e211f1b272bd4b0c516232afe2"}, ] @@ -2661,90 +2687,90 @@ flask-simple-crypt = [ {file = "Flask_Simple_Crypt-0.3.3-py3-none-any.whl", hash = "sha256:08c3fcad955ac148bb885b1de4798c1cfce8512452072beee414bacf1552e8ef"}, ] flask-sqlalchemy = [ - {file = "Flask-SQLAlchemy-3.0.2.tar.gz", hash = "sha256:16199f5b3ddfb69e0df2f52ae4c76aedbfec823462349dabb21a1b2e0a2b65e9"}, - {file = "Flask_SQLAlchemy-3.0.2-py3-none-any.whl", hash = "sha256:7d0cd9cf73e64a996bb881a1ebd01633fc5a6d11c36ea27f7b5e251dc45476e7"}, + {file = "Flask-SQLAlchemy-3.0.3.tar.gz", hash = "sha256:2764335f3c9d7ebdc9ed6044afaf98aae9fa50d7a074cef55dde307ec95903ec"}, + {file = "Flask_SQLAlchemy-3.0.3-py3-none-any.whl", hash = "sha256:add5750b2f9cd10512995261ee2aa23fab85bd5626061aa3c564b33bb4aa780a"}, ] furo = [ - {file = "furo-2022.9.29-py3-none-any.whl", hash = "sha256:559ee17999c0f52728481dcf6b1b0cf8c9743e68c5e3a18cb45a7992747869a9"}, - {file = "furo-2022.9.29.tar.gz", hash = "sha256:d4238145629c623609c2deb5384f8d036e2a1ee2a101d64b67b4348112470dbd"}, + {file = "furo-2023.3.27-py3-none-any.whl", hash = "sha256:4ab2be254a2d5e52792d0ca793a12c35582dd09897228a6dd47885dabd5c9521"}, + {file = "furo-2023.3.27.tar.gz", hash = "sha256:b99e7867a5cc833b2b34d7230631dd6558c7a29f93071fdbb5709634bb33c5a5"}, ] gitdb = [ - {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, - {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, + {file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"}, + {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"}, ] gitpython = [ {file = "GitPython-3.1.31-py3-none-any.whl", hash = "sha256:f04893614f6aa713a60cbbe1e6a97403ef633103cdd0ef5eb6efe0deb98dbe8d"}, 
{file = "GitPython-3.1.31.tar.gz", hash = "sha256:8ce3bcf69adfdf7c7d503e78fd3b1c492af782d58893b650adb2ac8912ddd573"}, ] greenlet = [ - {file = "greenlet-2.0.1-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:9ed358312e63bf683b9ef22c8e442ef6c5c02973f0c2a939ec1d7b50c974015c"}, - {file = "greenlet-2.0.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4f09b0010e55bec3239278f642a8a506b91034f03a4fb28289a7d448a67f1515"}, - {file = "greenlet-2.0.1-cp27-cp27m-win32.whl", hash = "sha256:1407fe45246632d0ffb7a3f4a520ba4e6051fc2cbd61ba1f806900c27f47706a"}, - {file = "greenlet-2.0.1-cp27-cp27m-win_amd64.whl", hash = "sha256:3001d00eba6bbf084ae60ec7f4bb8ed375748f53aeaefaf2a37d9f0370558524"}, - {file = "greenlet-2.0.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d566b82e92ff2e09dd6342df7e0eb4ff6275a3f08db284888dcd98134dbd4243"}, - {file = "greenlet-2.0.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:0722c9be0797f544a3ed212569ca3fe3d9d1a1b13942d10dd6f0e8601e484d26"}, - {file = "greenlet-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d37990425b4687ade27810e3b1a1c37825d242ebc275066cfee8cb6b8829ccd"}, - {file = "greenlet-2.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be35822f35f99dcc48152c9839d0171a06186f2d71ef76dc57fa556cc9bf6b45"}, - {file = "greenlet-2.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c140e7eb5ce47249668056edf3b7e9900c6a2e22fb0eaf0513f18a1b2c14e1da"}, - {file = "greenlet-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d21681f09e297a5adaa73060737e3aa1279a13ecdcfcc6ef66c292cb25125b2d"}, - {file = "greenlet-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fb412b7db83fe56847df9c47b6fe3f13911b06339c2aa02dcc09dce8bbf582cd"}, - {file = "greenlet-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:c6a08799e9e88052221adca55741bf106ec7ea0710bca635c208b751f0d5b617"}, - {file = "greenlet-2.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9e112e03d37987d7b90c1e98ba5e1b59e1645226d78d73282f45b326f7bddcb9"}, - {file = "greenlet-2.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56961cfca7da2fdd178f95ca407fa330c64f33289e1804b592a77d5593d9bd94"}, - {file = "greenlet-2.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:13ba6e8e326e2116c954074c994da14954982ba2795aebb881c07ac5d093a58a"}, - {file = "greenlet-2.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bf633a50cc93ed17e494015897361010fc08700d92676c87931d3ea464123ce"}, - {file = "greenlet-2.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9f2c221eecb7ead00b8e3ddb913c67f75cba078fd1d326053225a3f59d850d72"}, - {file = "greenlet-2.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:13ebf93c343dd8bd010cd98e617cb4c1c1f352a0cf2524c82d3814154116aa82"}, - {file = "greenlet-2.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:6f61d71bbc9b4a3de768371b210d906726535d6ca43506737682caa754b956cd"}, - {file = "greenlet-2.0.1-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:2d0bac0385d2b43a7bd1d651621a4e0f1380abc63d6fb1012213a401cbd5bf8f"}, - {file = "greenlet-2.0.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:f6327b6907b4cb72f650a5b7b1be23a2aab395017aa6f1adb13069d66360eb3f"}, - {file = "greenlet-2.0.1-cp35-cp35m-win32.whl", hash = "sha256:81b0ea3715bf6a848d6f7149d25bf018fd24554a4be01fcbbe3fdc78e890b955"}, - {file = "greenlet-2.0.1-cp35-cp35m-win_amd64.whl", hash = 
"sha256:38255a3f1e8942573b067510f9611fc9e38196077b0c8eb7a8c795e105f9ce77"}, - {file = "greenlet-2.0.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:04957dc96669be041e0c260964cfef4c77287f07c40452e61abe19d647505581"}, - {file = "greenlet-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:4aeaebcd91d9fee9aa768c1b39cb12214b30bf36d2b7370505a9f2165fedd8d9"}, - {file = "greenlet-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:974a39bdb8c90a85982cdb78a103a32e0b1be986d411303064b28a80611f6e51"}, - {file = "greenlet-2.0.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8dca09dedf1bd8684767bc736cc20c97c29bc0c04c413e3276e0962cd7aeb148"}, - {file = "greenlet-2.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4c0757db9bd08470ff8277791795e70d0bf035a011a528ee9a5ce9454b6cba2"}, - {file = "greenlet-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:5067920de254f1a2dee8d3d9d7e4e03718e8fd2d2d9db962c8c9fa781ae82a39"}, - {file = "greenlet-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:5a8e05057fab2a365c81abc696cb753da7549d20266e8511eb6c9d9f72fe3e92"}, - {file = "greenlet-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:3d75b8d013086b08e801fbbb896f7d5c9e6ccd44f13a9241d2bf7c0df9eda928"}, - {file = "greenlet-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:097e3dae69321e9100202fc62977f687454cd0ea147d0fd5a766e57450c569fd"}, - {file = "greenlet-2.0.1-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:cb242fc2cda5a307a7698c93173d3627a2a90d00507bccf5bc228851e8304963"}, - {file = "greenlet-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:72b00a8e7c25dcea5946692a2485b1a0c0661ed93ecfedfa9b6687bd89a24ef5"}, - {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5b0ff9878333823226d270417f24f4d06f235cb3e54d1103b71ea537a6a86ce"}, - {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be9e0fb2ada7e5124f5282d6381903183ecc73ea019568d6d63d33f25b2a9000"}, - {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b493db84d124805865adc587532ebad30efa68f79ad68f11b336e0a51ec86c2"}, - {file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0459d94f73265744fee4c2d5ec44c6f34aa8a31017e6e9de770f7bcf29710be9"}, - {file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a20d33124935d27b80e6fdacbd34205732660e0a1d35d8b10b3328179a2b51a1"}, - {file = "greenlet-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:ea688d11707d30e212e0110a1aac7f7f3f542a259235d396f88be68b649e47d1"}, - {file = "greenlet-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:afe07421c969e259e9403c3bb658968702bc3b78ec0b6fde3ae1e73440529c23"}, - {file = "greenlet-2.0.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:cd4ccc364cf75d1422e66e247e52a93da6a9b73cefa8cad696f3cbbb75af179d"}, - {file = "greenlet-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:4c8b1c43e75c42a6cafcc71defa9e01ead39ae80bd733a2608b297412beede68"}, - {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:659f167f419a4609bc0516fb18ea69ed39dbb25594934bd2dd4d0401660e8a1e"}, - {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:356e4519d4dfa766d50ecc498544b44c0249b6de66426041d7f8b751de4d6b48"}, - {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:811e1d37d60b47cb8126e0a929b58c046251f28117cb16fcd371eed61f66b764"}, - {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d38ffd0e81ba8ef347d2be0772e899c289b59ff150ebbbbe05dc61b1246eb4e0"}, - {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0109af1138afbfb8ae647e31a2b1ab030f58b21dd8528c27beaeb0093b7938a9"}, - {file = "greenlet-2.0.1-cp38-cp38-win32.whl", hash = "sha256:88c8d517e78acdf7df8a2134a3c4b964415b575d2840a2746ddb1cc6175f8608"}, - {file = "greenlet-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:d6ee1aa7ab36475035eb48c01efae87d37936a8173fc4d7b10bb02c2d75dd8f6"}, - {file = "greenlet-2.0.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:b1992ba9d4780d9af9726bbcef6a1db12d9ab1ccc35e5773685a24b7fb2758eb"}, - {file = "greenlet-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:b5e83e4de81dcc9425598d9469a624826a0b1211380ac444c7c791d4a2137c19"}, - {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:505138d4fa69462447a562a7c2ef723c6025ba12ac04478bc1ce2fcc279a2db5"}, - {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cce1e90dd302f45716a7715517c6aa0468af0bf38e814ad4eab58e88fc09f7f7"}, - {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e9744c657d896c7b580455e739899e492a4a452e2dd4d2b3e459f6b244a638d"}, - {file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:662e8f7cad915ba75d8017b3e601afc01ef20deeeabf281bd00369de196d7726"}, - {file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:41b825d65f31e394b523c84db84f9383a2f7eefc13d987f308f4663794d2687e"}, - {file = "greenlet-2.0.1-cp39-cp39-win32.whl", hash = "sha256:db38f80540083ea33bdab614a9d28bcec4b54daa5aff1668d7827a9fc769ae0a"}, - {file = "greenlet-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b23d2a46d53210b498e5b701a1913697671988f4bf8e10f935433f6e7c332fb6"}, - {file = "greenlet-2.0.1.tar.gz", hash = "sha256:42e602564460da0e8ee67cb6d7236363ee5e131aa15943b6670e44e5c2ed0f67"}, + {file = "greenlet-2.0.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:bdfea8c661e80d3c1c99ad7c3ff74e6e87184895bbaca6ee8cc61209f8b9b85d"}, + {file = "greenlet-2.0.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9d14b83fab60d5e8abe587d51c75b252bcc21683f24699ada8fb275d7712f5a9"}, + {file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"}, + {file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"}, + {file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"}, + {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"}, + {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"}, + {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"}, + {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d75209eed723105f9596807495d58d10b3470fa6732dd6756595e89925ce2470"}, + {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:3a51c9751078733d88e013587b108f1b7a1fb106d402fb390740f002b6f6551a"}, + {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"}, + {file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"}, + {file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"}, + {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"}, + {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"}, + {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"}, + {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:eff4eb9b7eb3e4d0cae3d28c283dc16d9bed6b193c2e1ace3ed86ce48ea8df19"}, + {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5454276c07d27a740c5892f4907c86327b632127dd9abec42ee62e12427ff7e3"}, + {file = "greenlet-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:7cafd1208fdbe93b67c7086876f061f660cfddc44f404279c1585bbf3cdc64c5"}, + {file = "greenlet-2.0.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:910841381caba4f744a44bf81bfd573c94e10b3045ee00de0cbf436fe50673a6"}, + {file = "greenlet-2.0.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:18a7f18b82b52ee85322d7a7874e676f34ab319b9f8cce5de06067384aa8ff43"}, + {file = "greenlet-2.0.2-cp35-cp35m-win32.whl", hash = "sha256:03a8f4f3430c3b3ff8d10a2a86028c660355ab637cee9333d63d66b56f09d52a"}, + {file = "greenlet-2.0.2-cp35-cp35m-win_amd64.whl", hash = "sha256:4b58adb399c4d61d912c4c331984d60eb66565175cdf4a34792cd9600f21b394"}, + {file = "greenlet-2.0.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:703f18f3fda276b9a916f0934d2fb6d989bf0b4fb5a64825260eb9bfd52d78f0"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:32e5b64b148966d9cccc2c8d35a671409e45f195864560829f395a54226408d3"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dd11f291565a81d71dab10b7033395b7a3a5456e637cf997a6f33ebdf06f8db"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0f72c9ddb8cd28532185f54cc1453f2c16fb417a08b53a855c4e6a418edd099"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd021c754b162c0fb55ad5d6b9d960db667faad0fa2ff25bb6e1301b0b6e6a75"}, + {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:3c9b12575734155d0c09d6c3e10dbd81665d5c18e1a7c6597df72fd05990c8cf"}, + {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b9ec052b06a0524f0e35bd8790686a1da006bd911dd1ef7d50b77bfbad74e292"}, + {file = "greenlet-2.0.2-cp36-cp36m-win32.whl", hash = "sha256:dbfcfc0218093a19c252ca8eb9aee3d29cfdcb586df21049b9d777fd32c14fd9"}, + {file = "greenlet-2.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:9f35ec95538f50292f6d8f2c9c9f8a3c6540bbfec21c9e5b4b751e0a7c20864f"}, + {file = "greenlet-2.0.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:d5508f0b173e6aa47273bdc0a0b5ba055b59662ba7c7ee5119528f466585526b"}, + {file = 
"greenlet-2.0.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:f82d4d717d8ef19188687aa32b8363e96062911e63ba22a0cff7802a8e58e5f1"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9c59a2120b55788e800d82dfa99b9e156ff8f2227f07c5e3012a45a399620b7"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2780572ec463d44c1d3ae850239508dbeb9fed38e294c68d19a24d925d9223ca"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:937e9020b514ceedb9c830c55d5c9872abc90f4b5862f89c0887033ae33c6f73"}, + {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:36abbf031e1c0f79dd5d596bfaf8e921c41df2bdf54ee1eed921ce1f52999a86"}, + {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:18e98fb3de7dba1c0a852731c3070cf022d14f0d68b4c87a19cc1016f3bb8b33"}, + {file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"}, + {file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"}, + {file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acd2162a36d3de67ee896c43effcd5ee3de247eb00354db411feb025aa319857"}, + {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0bf60faf0bc2468089bdc5edd10555bab6e85152191df713e2ab1fcc86382b5a"}, + {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"}, + {file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"}, + {file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"}, + {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be4ed120b52ae4d974aa40215fcdfde9194d63541c7ded40ee12eb4dda57b76b"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c817e84245513926588caf1152e3b559ff794d505555211ca041f032abbb6b"}, + {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1a819eef4b0e0b96bb0d98d797bef17dc1b4a10e8d7446be32d1da33e095dbb8"}, + {file = 
"greenlet-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7efde645ca1cc441d6dc4b48c0f7101e8d86b54c8530141b09fd31cef5149ec9"}, + {file = "greenlet-2.0.2-cp39-cp39-win32.whl", hash = "sha256:ea9872c80c132f4663822dd2a08d404073a5a9b5ba6155bea72fb2a79d1093b5"}, + {file = "greenlet-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:db1a39669102a1d8d12b57de2bb7e2ec9066a6f2b3da35ae511ff93b01b5d564"}, + {file = "greenlet-2.0.2.tar.gz", hash = "sha256:e7c8dc13af7db097bed64a051d2dd49e9f0af495c26995c00a9ee842690d34c0"}, ] gunicorn = [ {file = "gunicorn-20.1.0-py3-none-any.whl", hash = "sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e"}, {file = "gunicorn-20.1.0.tar.gz", hash = "sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8"}, ] identify = [ - {file = "identify-2.5.6-py2.py3-none-any.whl", hash = "sha256:b276db7ec52d7e89f5bc4653380e33054ddc803d25875952ad90b0f012cbcdaa"}, - {file = "identify-2.5.6.tar.gz", hash = "sha256:6c32dbd747aa4ceee1df33f25fed0b0f6e0d65721b15bd151307ff7056d50245"}, + {file = "identify-2.5.22-py2.py3-none-any.whl", hash = "sha256:f0faad595a4687053669c112004178149f6c326db71ee999ae4636685753ad2f"}, + {file = "identify-2.5.22.tar.gz", hash = "sha256:f7a93d6cf98e29bd07663c60728e7a4057615068d7a639d132dc883b2d54d31e"}, ] idna = [ {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, @@ -2755,243 +2781,257 @@ imagesize = [ {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, ] importlib-metadata = [ - {file = "importlib_metadata-4.13.0-py3-none-any.whl", hash = "sha256:8a8a81bcf996e74fee46f0d16bd3eaa382a7eb20fd82445c3ad11f4090334116"}, - {file = "importlib_metadata-4.13.0.tar.gz", hash = "sha256:dd0173e8f150d6815e098fd354f6414b0f079af4644ddfe90c71e2fc6174346d"}, + {file = "importlib_metadata-6.1.0-py3-none-any.whl", hash = "sha256:ff80f3b5394912eb1b108fcfd444dc78b7f1f3e16b16188054bd01cb9cb86f09"}, + {file = "importlib_metadata-6.1.0.tar.gz", hash = "sha256:43ce9281e097583d758c2c708c4376371261a02c34682491a8e98352365aad20"}, ] inflection = [ {file = "inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2"}, {file = "inflection-0.5.1.tar.gz", hash = "sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417"}, ] iniconfig = [ - {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, - {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] isort = [ - {file = "isort-5.11.4-py3-none-any.whl", hash = "sha256:c033fd0edb91000a7f09527fe5c75321878f98322a77ddcc81adbd83724afb7b"}, - {file = "isort-5.11.4.tar.gz", hash = "sha256:6db30c5ded9815d813932c04c2f85a360bcdd35fed496f4d8f35495ef0a261b6"}, + {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, + {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, ] itsdangerous = [ {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = 
"sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"}, {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"}, ] -Jinja2 = [ +jinja2 = [ {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, ] jsonschema = [ - {file = "jsonschema-4.16.0-py3-none-any.whl", hash = "sha256:9e74b8f9738d6a946d70705dc692b74b5429cd0960d58e79ffecfc43b2221eb9"}, - {file = "jsonschema-4.16.0.tar.gz", hash = "sha256:165059f076eff6971bae5b742fc029a7b4ef3f9bcf04c14e4776a7605de14b23"}, + {file = "jsonschema-4.17.3-py3-none-any.whl", hash = "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6"}, + {file = "jsonschema-4.17.3.tar.gz", hash = "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d"}, ] kombu = [ {file = "kombu-5.2.4-py3-none-any.whl", hash = "sha256:8b213b24293d3417bcf0d2f5537b7f756079e3ea232a8386dcc89a59fd2361a4"}, {file = "kombu-5.2.4.tar.gz", hash = "sha256:37cee3ee725f94ea8bb173eaab7c1760203ea53bbebae226328600f9d2799610"}, ] lazy-object-proxy = [ - {file = "lazy-object-proxy-1.7.1.tar.gz", hash = "sha256:d609c75b986def706743cdebe5e47553f4a5a1da9c5ff66d76013ef396b5a8a4"}, - {file = "lazy_object_proxy-1.7.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bb8c5fd1684d60a9902c60ebe276da1f2281a318ca16c1d0a96db28f62e9166b"}, - {file = "lazy_object_proxy-1.7.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a57d51ed2997e97f3b8e3500c984db50a554bb5db56c50b5dab1b41339b37e36"}, - {file = "lazy_object_proxy-1.7.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd45683c3caddf83abbb1249b653a266e7069a09f486daa8863fb0e7496a9fdb"}, - {file = "lazy_object_proxy-1.7.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8561da8b3dd22d696244d6d0d5330618c993a215070f473b699e00cf1f3f6443"}, - {file = "lazy_object_proxy-1.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fccdf7c2c5821a8cbd0a9440a456f5050492f2270bd54e94360cac663398739b"}, - {file = "lazy_object_proxy-1.7.1-cp310-cp310-win32.whl", hash = "sha256:898322f8d078f2654d275124a8dd19b079080ae977033b713f677afcfc88e2b9"}, - {file = "lazy_object_proxy-1.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:85b232e791f2229a4f55840ed54706110c80c0a210d076eee093f2b2e33e1bfd"}, - {file = "lazy_object_proxy-1.7.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:46ff647e76f106bb444b4533bb4153c7370cdf52efc62ccfc1a28bdb3cc95442"}, - {file = "lazy_object_proxy-1.7.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12f3bb77efe1367b2515f8cb4790a11cffae889148ad33adad07b9b55e0ab22c"}, - {file = "lazy_object_proxy-1.7.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c19814163728941bb871240d45c4c30d33b8a2e85972c44d4e63dd7107faba44"}, - {file = "lazy_object_proxy-1.7.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:e40f2013d96d30217a51eeb1db28c9ac41e9d0ee915ef9d00da639c5b63f01a1"}, - {file = "lazy_object_proxy-1.7.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:2052837718516a94940867e16b1bb10edb069ab475c3ad84fd1e1a6dd2c0fcfc"}, - {file = "lazy_object_proxy-1.7.1-cp36-cp36m-win32.whl", hash = "sha256:6a24357267aa976abab660b1d47a34aaf07259a0c3859a34e536f1ee6e76b5bb"}, - {file = 
"lazy_object_proxy-1.7.1-cp36-cp36m-win_amd64.whl", hash = "sha256:6aff3fe5de0831867092e017cf67e2750c6a1c7d88d84d2481bd84a2e019ec35"}, - {file = "lazy_object_proxy-1.7.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6a6e94c7b02641d1311228a102607ecd576f70734dc3d5e22610111aeacba8a0"}, - {file = "lazy_object_proxy-1.7.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ce15276a1a14549d7e81c243b887293904ad2d94ad767f42df91e75fd7b5b6"}, - {file = "lazy_object_proxy-1.7.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e368b7f7eac182a59ff1f81d5f3802161932a41dc1b1cc45c1f757dc876b5d2c"}, - {file = "lazy_object_proxy-1.7.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6ecbb350991d6434e1388bee761ece3260e5228952b1f0c46ffc800eb313ff42"}, - {file = "lazy_object_proxy-1.7.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:553b0f0d8dbf21890dd66edd771f9b1b5f51bd912fa5f26de4449bfc5af5e029"}, - {file = "lazy_object_proxy-1.7.1-cp37-cp37m-win32.whl", hash = "sha256:c7a683c37a8a24f6428c28c561c80d5f4fd316ddcf0c7cab999b15ab3f5c5c69"}, - {file = "lazy_object_proxy-1.7.1-cp37-cp37m-win_amd64.whl", hash = "sha256:df2631f9d67259dc9620d831384ed7732a198eb434eadf69aea95ad18c587a28"}, - {file = "lazy_object_proxy-1.7.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:07fa44286cda977bd4803b656ffc1c9b7e3bc7dff7d34263446aec8f8c96f88a"}, - {file = "lazy_object_proxy-1.7.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4dca6244e4121c74cc20542c2ca39e5c4a5027c81d112bfb893cf0790f96f57e"}, - {file = "lazy_object_proxy-1.7.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91ba172fc5b03978764d1df5144b4ba4ab13290d7bab7a50f12d8117f8630c38"}, - {file = "lazy_object_proxy-1.7.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:043651b6cb706eee4f91854da4a089816a6606c1428fd391573ef8cb642ae4f7"}, - {file = "lazy_object_proxy-1.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b9e89b87c707dd769c4ea91f7a31538888aad05c116a59820f28d59b3ebfe25a"}, - {file = "lazy_object_proxy-1.7.1-cp38-cp38-win32.whl", hash = "sha256:9d166602b525bf54ac994cf833c385bfcc341b364e3ee71e3bf5a1336e677b55"}, - {file = "lazy_object_proxy-1.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:8f3953eb575b45480db6568306893f0bd9d8dfeeebd46812aa09ca9579595148"}, - {file = "lazy_object_proxy-1.7.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dd7ed7429dbb6c494aa9bc4e09d94b778a3579be699f9d67da7e6804c422d3de"}, - {file = "lazy_object_proxy-1.7.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70ed0c2b380eb6248abdef3cd425fc52f0abd92d2b07ce26359fcbc399f636ad"}, - {file = "lazy_object_proxy-1.7.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7096a5e0c1115ec82641afbdd70451a144558ea5cf564a896294e346eb611be1"}, - {file = "lazy_object_proxy-1.7.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f769457a639403073968d118bc70110e7dce294688009f5c24ab78800ae56dc8"}, - {file = "lazy_object_proxy-1.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:39b0e26725c5023757fc1ab2a89ef9d7ab23b84f9251e28f9cc114d5b59c1b09"}, - {file = "lazy_object_proxy-1.7.1-cp39-cp39-win32.whl", hash = "sha256:2130db8ed69a48a3440103d4a520b89d8a9405f1b06e2cc81640509e8bf6548f"}, - {file = "lazy_object_proxy-1.7.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:677ea950bef409b47e51e733283544ac3d660b709cfce7b187f5ace137960d61"}, - {file = "lazy_object_proxy-1.7.1-pp37.pp38-none-any.whl", hash = "sha256:d66906d5785da8e0be7360912e99c9188b70f52c422f9fc18223347235691a84"}, + {file = "lazy-object-proxy-1.9.0.tar.gz", hash = "sha256:659fb5809fa4629b8a1ac5106f669cfc7bef26fbb389dda53b3e010d1ac4ebae"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b40387277b0ed2d0602b8293b94d7257e17d1479e257b4de114ea11a8cb7f2d7"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8c6cfb338b133fbdbc5cfaa10fe3c6aeea827db80c978dbd13bc9dd8526b7d4"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:721532711daa7db0d8b779b0bb0318fa87af1c10d7fe5e52ef30f8eff254d0cd"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:66a3de4a3ec06cd8af3f61b8e1ec67614fbb7c995d02fa224813cb7afefee701"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1aa3de4088c89a1b69f8ec0dcc169aa725b0ff017899ac568fe44ddc1396df46"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-win32.whl", hash = "sha256:f0705c376533ed2a9e5e97aacdbfe04cecd71e0aa84c7c0595d02ef93b6e4455"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea806fd4c37bf7e7ad82537b0757999264d5f70c45468447bb2b91afdbe73a6e"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:946d27deaff6cf8452ed0dba83ba38839a87f4f7a9732e8f9fd4107b21e6ff07"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79a31b086e7e68b24b99b23d57723ef7e2c6d81ed21007b6281ebcd1688acb0a"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f699ac1c768270c9e384e4cbd268d6e67aebcfae6cd623b4d7c3bfde5a35db59"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfb38f9ffb53b942f2b5954e0f610f1e721ccebe9cce9025a38c8ccf4a5183a4"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:189bbd5d41ae7a498397287c408617fe5c48633e7755287b21d741f7db2706a9"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-win32.whl", hash = "sha256:81fc4d08b062b535d95c9ea70dbe8a335c45c04029878e62d744bdced5141586"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:f2457189d8257dd41ae9b434ba33298aec198e30adf2dcdaaa3a28b9994f6adb"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d9e25ef10a39e8afe59a5c348a4dbf29b4868ab76269f81ce1674494e2565a6e"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbf9b082426036e19c6924a9ce90c740a9861e2bdc27a4834fd0a910742ac1e8"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f5fa4a61ce2438267163891961cfd5e32ec97a2c444e5b842d574251ade27d2"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8fa02eaab317b1e9e03f69aab1f91e120e7899b392c4fc19807a8278a07a97e8"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e7c21c95cae3c05c14aafffe2865bbd5e377cfc1348c4f7751d9dc9a48ca4bda"}, + {file = 
"lazy_object_proxy-1.9.0-cp37-cp37m-win32.whl", hash = "sha256:f12ad7126ae0c98d601a7ee504c1122bcef553d1d5e0c3bfa77b16b3968d2734"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:edd20c5a55acb67c7ed471fa2b5fb66cb17f61430b7a6b9c3b4a1e40293b1671"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0daa332786cf3bb49e10dc6a17a52f6a8f9601b4cf5c295a4f85854d61de63"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cd077f3d04a58e83d04b20e334f678c2b0ff9879b9375ed107d5d07ff160171"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660c94ea760b3ce47d1855a30984c78327500493d396eac4dfd8bd82041b22be"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:212774e4dfa851e74d393a2370871e174d7ff0ebc980907723bb67d25c8a7c30"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f0117049dd1d5635bbff65444496c90e0baa48ea405125c088e93d9cf4525b11"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-win32.whl", hash = "sha256:0a891e4e41b54fd5b8313b96399f8b0e173bbbfc03c7631f01efbe29bb0bcf82"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:9990d8e71b9f6488e91ad25f322898c136b008d87bf852ff65391b004da5e17b"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9e7551208b2aded9c1447453ee366f1c4070602b3d932ace044715d89666899b"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f83ac4d83ef0ab017683d715ed356e30dd48a93746309c8f3517e1287523ef4"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7322c3d6f1766d4ef1e51a465f47955f1e8123caee67dd641e67d539a534d006"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:18b78ec83edbbeb69efdc0e9c1cb41a3b1b1ed11ddd8ded602464c3fc6020494"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:09763491ce220c0299688940f8dc2c5d05fd1f45af1e42e636b2e8b2303e4382"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-win32.whl", hash = "sha256:9090d8e53235aa280fc9239a86ae3ea8ac58eff66a705fa6aa2ec4968b95c821"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f"}, ] livereload = [ {file = "livereload-2.6.3-py2.py3-none-any.whl", hash = "sha256:ad4ac6f53b2d62bb6ce1a5e6e96f1f00976a32348afedcb4b6d68df2a1d346e4"}, {file = "livereload-2.6.3.tar.gz", hash = "sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869"}, ] lxml = [ - {file = "lxml-4.9.1-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:98cafc618614d72b02185ac583c6f7796202062c41d2eeecdf07820bad3295ed"}, - {file = "lxml-4.9.1-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c62e8dd9754b7debda0c5ba59d34509c4688f853588d75b53c3791983faa96fc"}, - {file = "lxml-4.9.1-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:21fb3d24ab430fc538a96e9fbb9b150029914805d551deeac7d7822f64631dfc"}, - {file = "lxml-4.9.1-cp27-cp27m-win32.whl", hash = "sha256:86e92728ef3fc842c50a5cb1d5ba2bc66db7da08a7af53fb3da79e202d1b2cd3"}, - {file = "lxml-4.9.1-cp27-cp27m-win_amd64.whl", hash = 
"sha256:4cfbe42c686f33944e12f45a27d25a492cc0e43e1dc1da5d6a87cbcaf2e95627"}, - {file = "lxml-4.9.1-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dad7b164905d3e534883281c050180afcf1e230c3d4a54e8038aa5cfcf312b84"}, - {file = "lxml-4.9.1-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a614e4afed58c14254e67862456d212c4dcceebab2eaa44d627c2ca04bf86837"}, - {file = "lxml-4.9.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f9ced82717c7ec65a67667bb05865ffe38af0e835cdd78728f1209c8fffe0cad"}, - {file = "lxml-4.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:d9fc0bf3ff86c17348dfc5d322f627d78273eba545db865c3cd14b3f19e57fa5"}, - {file = "lxml-4.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e5f66bdf0976ec667fc4594d2812a00b07ed14d1b44259d19a41ae3fff99f2b8"}, - {file = "lxml-4.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:fe17d10b97fdf58155f858606bddb4e037b805a60ae023c009f760d8361a4eb8"}, - {file = "lxml-4.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8caf4d16b31961e964c62194ea3e26a0e9561cdf72eecb1781458b67ec83423d"}, - {file = "lxml-4.9.1-cp310-cp310-win32.whl", hash = "sha256:4780677767dd52b99f0af1f123bc2c22873d30b474aa0e2fc3fe5e02217687c7"}, - {file = "lxml-4.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:b122a188cd292c4d2fcd78d04f863b789ef43aa129b233d7c9004de08693728b"}, - {file = "lxml-4.9.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:be9eb06489bc975c38706902cbc6888f39e946b81383abc2838d186f0e8b6a9d"}, - {file = "lxml-4.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:f1be258c4d3dc609e654a1dc59d37b17d7fef05df912c01fc2e15eb43a9735f3"}, - {file = "lxml-4.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:927a9dd016d6033bc12e0bf5dee1dde140235fc8d0d51099353c76081c03dc29"}, - {file = "lxml-4.9.1-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9232b09f5efee6a495a99ae6824881940d6447debe272ea400c02e3b68aad85d"}, - {file = "lxml-4.9.1-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:04da965dfebb5dac2619cb90fcf93efdb35b3c6994fea58a157a834f2f94b318"}, - {file = "lxml-4.9.1-cp35-cp35m-win32.whl", hash = "sha256:4d5bae0a37af799207140652a700f21a85946f107a199bcb06720b13a4f1f0b7"}, - {file = "lxml-4.9.1-cp35-cp35m-win_amd64.whl", hash = "sha256:4878e667ebabe9b65e785ac8da4d48886fe81193a84bbe49f12acff8f7a383a4"}, - {file = "lxml-4.9.1-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:1355755b62c28950f9ce123c7a41460ed9743c699905cbe664a5bcc5c9c7c7fb"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:bcaa1c495ce623966d9fc8a187da80082334236a2a1c7e141763ffaf7a405067"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6eafc048ea3f1b3c136c71a86db393be36b5b3d9c87b1c25204e7d397cee9536"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:13c90064b224e10c14dcdf8086688d3f0e612db53766e7478d7754703295c7c8"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206a51077773c6c5d2ce1991327cda719063a47adc02bd703c56a662cdb6c58b"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:e8f0c9d65da595cfe91713bc1222af9ecabd37971762cb830dea2fc3b3bb2acf"}, - {file = "lxml-4.9.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8f0a4d179c9a941eb80c3a63cdb495e539e064f8054230844dcf2fcb812b71d3"}, - {file = "lxml-4.9.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:830c88747dce8a3e7525defa68afd742b4580df6aa2fdd6f0855481e3994d391"}, - {file = "lxml-4.9.1-cp36-cp36m-win32.whl", hash = "sha256:1e1cf47774373777936c5aabad489fef7b1c087dcd1f426b621fda9dcc12994e"}, - {file = "lxml-4.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:5974895115737a74a00b321e339b9c3f45c20275d226398ae79ac008d908bff7"}, - {file = "lxml-4.9.1-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:1423631e3d51008871299525b541413c9b6c6423593e89f9c4cfbe8460afc0a2"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:2aaf6a0a6465d39b5ca69688fce82d20088c1838534982996ec46633dc7ad6cc"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:9f36de4cd0c262dd9927886cc2305aa3f2210db437aa4fed3fb4940b8bf4592c"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae06c1e4bc60ee076292e582a7512f304abdf6c70db59b56745cca1684f875a4"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:57e4d637258703d14171b54203fd6822fda218c6c2658a7d30816b10995f29f3"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6d279033bf614953c3fc4a0aa9ac33a21e8044ca72d4fa8b9273fe75359d5cca"}, - {file = "lxml-4.9.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a60f90bba4c37962cbf210f0188ecca87daafdf60271f4c6948606e4dabf8785"}, - {file = "lxml-4.9.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6ca2264f341dd81e41f3fffecec6e446aa2121e0b8d026fb5130e02de1402785"}, - {file = "lxml-4.9.1-cp37-cp37m-win32.whl", hash = "sha256:27e590352c76156f50f538dbcebd1925317a0f70540f7dc8c97d2931c595783a"}, - {file = "lxml-4.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:eea5d6443b093e1545ad0210e6cf27f920482bfcf5c77cdc8596aec73523bb7e"}, - {file = "lxml-4.9.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f05251bbc2145349b8d0b77c0d4e5f3b228418807b1ee27cefb11f69ed3d233b"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:487c8e61d7acc50b8be82bda8c8d21d20e133c3cbf41bd8ad7eb1aaeb3f07c97"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d1a92d8e90b286d491e5626af53afef2ba04da33e82e30744795c71880eaa21"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:b570da8cd0012f4af9fa76a5635cd31f707473e65a5a335b186069d5c7121ff2"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ef87fca280fb15342726bd5f980f6faf8b84a5287fcc2d4962ea8af88b35130"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:93e414e3206779ef41e5ff2448067213febf260ba747fc65389a3ddaa3fb8715"}, - {file = "lxml-4.9.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6653071f4f9bac46fbc30f3c7838b0e9063ee335908c5d61fb7a4a86c8fd2036"}, - {file = "lxml-4.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:32a73c53783becdb7eaf75a2a1525ea8e49379fb7248c3eeefb9412123536387"}, - {file = "lxml-4.9.1-cp38-cp38-win32.whl", hash = 
"sha256:1a7c59c6ffd6ef5db362b798f350e24ab2cfa5700d53ac6681918f314a4d3b94"}, - {file = "lxml-4.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:1436cf0063bba7888e43f1ba8d58824f085410ea2025befe81150aceb123e345"}, - {file = "lxml-4.9.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:4beea0f31491bc086991b97517b9683e5cfb369205dac0148ef685ac12a20a67"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:41fb58868b816c202e8881fd0f179a4644ce6e7cbbb248ef0283a34b73ec73bb"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:bd34f6d1810d9354dc7e35158aa6cc33456be7706df4420819af6ed966e85448"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:edffbe3c510d8f4bf8640e02ca019e48a9b72357318383ca60e3330c23aaffc7"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6d949f53ad4fc7cf02c44d6678e7ff05ec5f5552b235b9e136bd52e9bf730b91"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:079b68f197c796e42aa80b1f739f058dcee796dc725cc9a1be0cdb08fc45b000"}, - {file = "lxml-4.9.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9c3a88d20e4fe4a2a4a84bf439a5ac9c9aba400b85244c63a1ab7088f85d9d25"}, - {file = "lxml-4.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4e285b5f2bf321fc0857b491b5028c5f276ec0c873b985d58d7748ece1d770dd"}, - {file = "lxml-4.9.1-cp39-cp39-win32.whl", hash = "sha256:ef72013e20dd5ba86a8ae1aed7f56f31d3374189aa8b433e7b12ad182c0d2dfb"}, - {file = "lxml-4.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:10d2017f9150248563bb579cd0d07c61c58da85c922b780060dcc9a3aa9f432d"}, - {file = "lxml-4.9.1-pp37-pypy37_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0538747a9d7827ce3e16a8fdd201a99e661c7dee3c96c885d8ecba3c35d1032c"}, - {file = "lxml-4.9.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:0645e934e940107e2fdbe7c5b6fb8ec6232444260752598bc4d09511bd056c0b"}, - {file = "lxml-4.9.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6daa662aba22ef3258934105be2dd9afa5bb45748f4f702a3b39a5bf53a1f4dc"}, - {file = "lxml-4.9.1-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:603a464c2e67d8a546ddaa206d98e3246e5db05594b97db844c2f0a1af37cf5b"}, - {file = "lxml-4.9.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c4b2e0559b68455c085fb0f6178e9752c4be3bba104d6e881eb5573b399d1eb2"}, - {file = "lxml-4.9.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0f3f0059891d3254c7b5fb935330d6db38d6519ecd238ca4fce93c234b4a0f73"}, - {file = "lxml-4.9.1-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c852b1530083a620cb0de5f3cd6826f19862bafeaf77586f1aef326e49d95f0c"}, - {file = "lxml-4.9.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:287605bede6bd36e930577c5925fcea17cb30453d96a7b4c63c14a257118dbb9"}, - {file = "lxml-4.9.1.tar.gz", hash = "sha256:fe749b052bb7233fe5d072fcb549221a8cb1a16725c47c37e42b0b9cb3ff2c3f"}, + {file = "lxml-4.9.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:76cf573e5a365e790396a5cc2b909812633409306c6531a6877c59061e42c4f2"}, + {file = "lxml-4.9.2-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:b1f42b6921d0e81b1bcb5e395bc091a70f41c4d4e55ba99c6da2b31626c44892"}, + {file = "lxml-4.9.2-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9f102706d0ca011de571de32c3247c6476b55bb6bc65a20f682f000b07a4852a"}, + {file = "lxml-4.9.2-cp27-cp27m-win32.whl", hash = "sha256:8d0b4612b66ff5d62d03bcaa043bb018f74dfea51184e53f067e6fdcba4bd8de"}, + {file = "lxml-4.9.2-cp27-cp27m-win_amd64.whl", hash = "sha256:4c8f293f14abc8fd3e8e01c5bd86e6ed0b6ef71936ded5bf10fe7a5efefbaca3"}, + {file = "lxml-4.9.2-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2899456259589aa38bfb018c364d6ae7b53c5c22d8e27d0ec7609c2a1ff78b50"}, + {file = "lxml-4.9.2-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6749649eecd6a9871cae297bffa4ee76f90b4504a2a2ab528d9ebe912b101975"}, + {file = "lxml-4.9.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:a08cff61517ee26cb56f1e949cca38caabe9ea9fbb4b1e10a805dc39844b7d5c"}, + {file = "lxml-4.9.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:85cabf64adec449132e55616e7ca3e1000ab449d1d0f9d7f83146ed5bdcb6d8a"}, + {file = "lxml-4.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8340225bd5e7a701c0fa98284c849c9b9fc9238abf53a0ebd90900f25d39a4e4"}, + {file = "lxml-4.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:1ab8f1f932e8f82355e75dda5413a57612c6ea448069d4fb2e217e9a4bed13d4"}, + {file = "lxml-4.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:699a9af7dffaf67deeae27b2112aa06b41c370d5e7633e0ee0aea2e0b6c211f7"}, + {file = "lxml-4.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9cc34af337a97d470040f99ba4282f6e6bac88407d021688a5d585e44a23184"}, + {file = "lxml-4.9.2-cp310-cp310-win32.whl", hash = "sha256:d02a5399126a53492415d4906ab0ad0375a5456cc05c3fc0fc4ca11771745cda"}, + {file = "lxml-4.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:a38486985ca49cfa574a507e7a2215c0c780fd1778bb6290c21193b7211702ab"}, + {file = "lxml-4.9.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c83203addf554215463b59f6399835201999b5e48019dc17f182ed5ad87205c9"}, + {file = "lxml-4.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:2a87fa548561d2f4643c99cd13131acb607ddabb70682dcf1dff5f71f781a4bf"}, + {file = "lxml-4.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:d6b430a9938a5a5d85fc107d852262ddcd48602c120e3dbb02137c83d212b380"}, + {file = "lxml-4.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3efea981d956a6f7173b4659849f55081867cf897e719f57383698af6f618a92"}, + {file = "lxml-4.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:df0623dcf9668ad0445e0558a21211d4e9a149ea8f5666917c8eeec515f0a6d1"}, + {file = "lxml-4.9.2-cp311-cp311-win32.whl", hash = "sha256:da248f93f0418a9e9d94b0080d7ebc407a9a5e6d0b57bb30db9b5cc28de1ad33"}, + {file = "lxml-4.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:3818b8e2c4b5148567e1b09ce739006acfaa44ce3156f8cbbc11062994b8e8dd"}, + {file = "lxml-4.9.2-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca989b91cf3a3ba28930a9fc1e9aeafc2a395448641df1f387a2d394638943b0"}, + {file = "lxml-4.9.2-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:822068f85e12a6e292803e112ab876bc03ed1f03dddb80154c395f891ca6b31e"}, + {file = "lxml-4.9.2-cp35-cp35m-win32.whl", hash = 
"sha256:be7292c55101e22f2a3d4d8913944cbea71eea90792bf914add27454a13905df"}, + {file = "lxml-4.9.2-cp35-cp35m-win_amd64.whl", hash = "sha256:998c7c41910666d2976928c38ea96a70d1aa43be6fe502f21a651e17483a43c5"}, + {file = "lxml-4.9.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:b26a29f0b7fc6f0897f043ca366142d2b609dc60756ee6e4e90b5f762c6adc53"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:ab323679b8b3030000f2be63e22cdeea5b47ee0abd2d6a1dc0c8103ddaa56cd7"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:689bb688a1db722485e4610a503e3e9210dcc20c520b45ac8f7533c837be76fe"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:f49e52d174375a7def9915c9f06ec4e569d235ad428f70751765f48d5926678c"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:36c3c175d34652a35475a73762b545f4527aec044910a651d2bf50de9c3352b1"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a35f8b7fa99f90dd2f5dc5a9fa12332642f087a7641289ca6c40d6e1a2637d8e"}, + {file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:58bfa3aa19ca4c0f28c5dde0ff56c520fbac6f0daf4fac66ed4c8d2fb7f22e74"}, + {file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc718cd47b765e790eecb74d044cc8d37d58562f6c314ee9484df26276d36a38"}, + {file = "lxml-4.9.2-cp36-cp36m-win32.whl", hash = "sha256:d5bf6545cd27aaa8a13033ce56354ed9e25ab0e4ac3b5392b763d8d04b08e0c5"}, + {file = "lxml-4.9.2-cp36-cp36m-win_amd64.whl", hash = "sha256:3ab9fa9d6dc2a7f29d7affdf3edebf6ece6fb28a6d80b14c3b2fb9d39b9322c3"}, + {file = "lxml-4.9.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:05ca3f6abf5cf78fe053da9b1166e062ade3fa5d4f92b4ed688127ea7d7b1d03"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:a5da296eb617d18e497bcf0a5c528f5d3b18dadb3619fbdadf4ed2356ef8d941"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:04876580c050a8c5341d706dd464ff04fd597095cc8c023252566a8826505726"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c9ec3eaf616d67db0764b3bb983962b4f385a1f08304fd30c7283954e6a7869b"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2a29ba94d065945944016b6b74e538bdb1751a1db6ffb80c9d3c2e40d6fa9894"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a82d05da00a58b8e4c0008edbc8a4b6ec5a4bc1e2ee0fb6ed157cf634ed7fa45"}, + {file = "lxml-4.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:223f4232855ade399bd409331e6ca70fb5578efef22cf4069a6090acc0f53c0e"}, + {file = "lxml-4.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d17bc7c2ccf49c478c5bdd447594e82692c74222698cfc9b5daae7ae7e90743b"}, + {file = "lxml-4.9.2-cp37-cp37m-win32.whl", hash = "sha256:b64d891da92e232c36976c80ed7ebb383e3f148489796d8d31a5b6a677825efe"}, + {file = "lxml-4.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:a0a336d6d3e8b234a3aae3c674873d8f0e720b76bc1d9416866c41cd9500ffb9"}, + {file = "lxml-4.9.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:da4dd7c9c50c059aba52b3524f84d7de956f7fef88f0bafcf4ad7dde94a064e8"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", 
hash = "sha256:821b7f59b99551c69c85a6039c65b75f5683bdc63270fec660f75da67469ca24"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:e5168986b90a8d1f2f9dc1b841467c74221bd752537b99761a93d2d981e04889"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:8e20cb5a47247e383cf4ff523205060991021233ebd6f924bca927fcf25cf86f"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:13598ecfbd2e86ea7ae45ec28a2a54fb87ee9b9fdb0f6d343297d8e548392c03"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:880bbbcbe2fca64e2f4d8e04db47bcdf504936fa2b33933efd945e1b429bea8c"}, + {file = "lxml-4.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7d2278d59425777cfcb19735018d897ca8303abe67cc735f9f97177ceff8027f"}, + {file = "lxml-4.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5344a43228767f53a9df6e5b253f8cdca7dfc7b7aeae52551958192f56d98457"}, + {file = "lxml-4.9.2-cp38-cp38-win32.whl", hash = "sha256:925073b2fe14ab9b87e73f9a5fde6ce6392da430f3004d8b72cc86f746f5163b"}, + {file = "lxml-4.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:9b22c5c66f67ae00c0199f6055705bc3eb3fcb08d03d2ec4059a2b1b25ed48d7"}, + {file = "lxml-4.9.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:5f50a1c177e2fa3ee0667a5ab79fdc6b23086bc8b589d90b93b4bd17eb0e64d1"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:090c6543d3696cbe15b4ac6e175e576bcc3f1ccfbba970061b7300b0c15a2140"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:63da2ccc0857c311d764e7d3d90f429c252e83b52d1f8f1d1fe55be26827d1f4"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:5b4545b8a40478183ac06c073e81a5ce4cf01bf1734962577cf2bb569a5b3bbf"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2e430cd2824f05f2d4f687701144556646bae8f249fd60aa1e4c768ba7018947"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6804daeb7ef69e7b36f76caddb85cccd63d0c56dedb47555d2fc969e2af6a1a5"}, + {file = "lxml-4.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a6e441a86553c310258aca15d1c05903aaf4965b23f3bc2d55f200804e005ee5"}, + {file = "lxml-4.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ca34efc80a29351897e18888c71c6aca4a359247c87e0b1c7ada14f0ab0c0fb2"}, + {file = "lxml-4.9.2-cp39-cp39-win32.whl", hash = "sha256:6b418afe5df18233fc6b6093deb82a32895b6bb0b1155c2cdb05203f583053f1"}, + {file = "lxml-4.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:f1496ea22ca2c830cbcbd473de8f114a320da308438ae65abad6bab7867fe38f"}, + {file = "lxml-4.9.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:b264171e3143d842ded311b7dccd46ff9ef34247129ff5bf5066123c55c2431c"}, + {file = "lxml-4.9.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0dc313ef231edf866912e9d8f5a042ddab56c752619e92dfd3a2c277e6a7299a"}, + {file = "lxml-4.9.2-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:16efd54337136e8cd72fb9485c368d91d77a47ee2d42b057564aae201257d419"}, + {file = "lxml-4.9.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = 
"sha256:0f2b1e0d79180f344ff9f321327b005ca043a50ece8713de61d1cb383fb8ac05"}, + {file = "lxml-4.9.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:7b770ed79542ed52c519119473898198761d78beb24b107acf3ad65deae61f1f"}, + {file = "lxml-4.9.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:efa29c2fe6b4fdd32e8ef81c1528506895eca86e1d8c4657fda04c9b3786ddf9"}, + {file = "lxml-4.9.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7e91ee82f4199af8c43d8158024cbdff3d931df350252288f0d4ce656df7f3b5"}, + {file = "lxml-4.9.2-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:b23e19989c355ca854276178a0463951a653309fb8e57ce674497f2d9f208746"}, + {file = "lxml-4.9.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:01d36c05f4afb8f7c20fd9ed5badca32a2029b93b1750f571ccc0b142531caf7"}, + {file = "lxml-4.9.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7b515674acfdcadb0eb5d00d8a709868173acece5cb0be3dd165950cbfdf5409"}, + {file = "lxml-4.9.2.tar.gz", hash = "sha256:2455cfaeb7ac70338b3257f41e21f0724f4b5b0c0e7702da67ee6c3640835b67"}, ] -Mako = [ - {file = "Mako-1.2.3-py3-none-any.whl", hash = "sha256:c413a086e38cd885088d5e165305ee8eed04e8b3f8f62df343480da0a385735f"}, - {file = "Mako-1.2.3.tar.gz", hash = "sha256:7fde96466fcfeedb0eed94f187f20b23d85e4cb41444be0e542e2c8c65c396cd"}, +mako = [ + {file = "Mako-1.2.4-py3-none-any.whl", hash = "sha256:c97c79c018b9165ac9922ae4f32da095ffd3c4e6872b45eded42926deea46818"}, + {file = "Mako-1.2.4.tar.gz", hash = "sha256:d60a3903dc3bb01a18ad6a89cdbe2e4eadc69c0bc8ef1e3773ba53d44c3f7a34"}, ] -MarkupSafe = [ - {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, - {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, +markupsafe = [ + {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"}, + {file = 
"MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"}, + {file = 
"MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"}, + {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"}, ] marshmallow = [ - {file = "marshmallow-3.18.0-py3-none-any.whl", hash = "sha256:35e02a3a06899c9119b785c12a22f4cda361745d66a71ab691fd7610202ae104"}, - {file = "marshmallow-3.18.0.tar.gz", hash = "sha256:6804c16114f7fce1f5b4dadc31f4674af23317fcc7f075da21e35c1a35d781f7"}, + {file = "marshmallow-3.19.0-py3-none-any.whl", hash = "sha256:93f0958568da045b0021ec6aeb7ac37c81bfcccbb9a0e7ed8559885070b3a19b"}, + {file = "marshmallow-3.19.0.tar.gz", hash = "sha256:90032c0fd650ce94b6ec6dc8dfeb0e3ff50c144586462c389b81a07205bedb78"}, ] marshmallow-enum = [ {file = "marshmallow-enum-1.5.1.tar.gz", hash = "sha256:38e697e11f45a8e64b4a1e664000897c659b60aa57bfa18d44e226a9920b6e58"}, {file = "marshmallow_enum-1.5.1-py2.py3-none-any.whl", hash = "sha256:57161ab3dbfde4f57adeb12090f39592e992b9c86d206d02f6bd03ebec60f072"}, ] -marshmallow-sqlalchemy = [ - {file = "marshmallow-sqlalchemy-0.28.1.tar.gz", hash = "sha256:aa376747296780a56355e3067b9c8bf43a2a1c44ff985de82b3a5d9e161ca2b8"}, - {file = "marshmallow_sqlalchemy-0.28.1-py2.py3-none-any.whl", hash = "sha256:dbb061c19375eca3a7d18358d2ca8bbaee825fc3000a3f114e2698282362b536"}, -] mccabe = [ {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, ] mypy = [ - {file = "mypy-0.982-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5085e6f442003fa915aeb0a46d4da58128da69325d8213b4b35cc7054090aed5"}, - {file = "mypy-0.982-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:41fd1cf9bc0e1c19b9af13a6580ccb66c381a5ee2cf63ee5ebab747a4badeba3"}, - {file = "mypy-0.982-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f793e3dd95e166b66d50e7b63e69e58e88643d80a3dcc3bcd81368e0478b089c"}, - {file = 
"mypy-0.982-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86ebe67adf4d021b28c3f547da6aa2cce660b57f0432617af2cca932d4d378a6"}, - {file = "mypy-0.982-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:175f292f649a3af7082fe36620369ffc4661a71005aa9f8297ea473df5772046"}, - {file = "mypy-0.982-cp310-cp310-win_amd64.whl", hash = "sha256:8ee8c2472e96beb1045e9081de8e92f295b89ac10c4109afdf3a23ad6e644f3e"}, - {file = "mypy-0.982-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58f27ebafe726a8e5ccb58d896451dd9a662a511a3188ff6a8a6a919142ecc20"}, - {file = "mypy-0.982-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6af646bd46f10d53834a8e8983e130e47d8ab2d4b7a97363e35b24e1d588947"}, - {file = "mypy-0.982-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e7aeaa763c7ab86d5b66ff27f68493d672e44c8099af636d433a7f3fa5596d40"}, - {file = "mypy-0.982-cp37-cp37m-win_amd64.whl", hash = "sha256:724d36be56444f569c20a629d1d4ee0cb0ad666078d59bb84f8f887952511ca1"}, - {file = "mypy-0.982-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:14d53cdd4cf93765aa747a7399f0961a365bcddf7855d9cef6306fa41de01c24"}, - {file = "mypy-0.982-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:26ae64555d480ad4b32a267d10cab7aec92ff44de35a7cd95b2b7cb8e64ebe3e"}, - {file = "mypy-0.982-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6389af3e204975d6658de4fb8ac16f58c14e1bacc6142fee86d1b5b26aa52bda"}, - {file = "mypy-0.982-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b35ce03a289480d6544aac85fa3674f493f323d80ea7226410ed065cd46f206"}, - {file = "mypy-0.982-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c6e564f035d25c99fd2b863e13049744d96bd1947e3d3d2f16f5828864506763"}, - {file = "mypy-0.982-cp38-cp38-win_amd64.whl", hash = "sha256:cebca7fd333f90b61b3ef7f217ff75ce2e287482206ef4a8b18f32b49927b1a2"}, - {file = "mypy-0.982-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a705a93670c8b74769496280d2fe6cd59961506c64f329bb179970ff1d24f9f8"}, - {file = "mypy-0.982-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:75838c649290d83a2b83a88288c1eb60fe7a05b36d46cbea9d22efc790002146"}, - {file = "mypy-0.982-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:91781eff1f3f2607519c8b0e8518aad8498af1419e8442d5d0afb108059881fc"}, - {file = "mypy-0.982-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaa97b9ddd1dd9901a22a879491dbb951b5dec75c3b90032e2baa7336777363b"}, - {file = "mypy-0.982-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a692a8e7d07abe5f4b2dd32d731812a0175626a90a223d4b58f10f458747dd8a"}, - {file = "mypy-0.982-cp39-cp39-win_amd64.whl", hash = "sha256:eb7a068e503be3543c4bd329c994103874fa543c1727ba5288393c21d912d795"}, - {file = "mypy-0.982-py3-none-any.whl", hash = "sha256:1021c241e8b6e1ca5a47e4d52601274ac078a89845cfde66c6d5f769819ffa1d"}, - {file = "mypy-0.982.tar.gz", hash = "sha256:85f7a343542dc8b1ed0a888cdd34dca56462654ef23aa673907305b260b3d746"}, + {file = "mypy-1.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39c7119335be05630611ee798cc982623b9e8f0cff04a0b48dfc26100e0b97af"}, + {file = "mypy-1.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:61bf08362e93b6b12fad3eab68c4ea903a077b87c90ac06c11e3d7a09b56b9c1"}, + {file = "mypy-1.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbb19c9f662e41e474e0cff502b7064a7edc6764f5262b6cd91d698163196799"}, + {file = "mypy-1.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:315ac73cc1cce4771c27d426b7ea558fb4e2836f89cb0296cbe056894e3a1f78"}, + {file = "mypy-1.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:5cb14ff9919b7df3538590fc4d4c49a0f84392237cbf5f7a816b4161c061829e"}, + {file = "mypy-1.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:26cdd6a22b9b40b2fd71881a8a4f34b4d7914c679f154f43385ca878a8297389"}, + {file = "mypy-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b5f81b40d94c785f288948c16e1f2da37203c6006546c5d947aab6f90aefef2"}, + {file = "mypy-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21b437be1c02712a605591e1ed1d858aba681757a1e55fe678a15c2244cd68a5"}, + {file = "mypy-1.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d809f88734f44a0d44959d795b1e6f64b2bbe0ea4d9cc4776aa588bb4229fc1c"}, + {file = "mypy-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:a380c041db500e1410bb5b16b3c1c35e61e773a5c3517926b81dfdab7582be54"}, + {file = "mypy-1.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b7c7b708fe9a871a96626d61912e3f4ddd365bf7f39128362bc50cbd74a634d5"}, + {file = "mypy-1.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1c10fa12df1232c936830839e2e935d090fc9ee315744ac33b8a32216b93707"}, + {file = "mypy-1.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0a28a76785bf57655a8ea5eb0540a15b0e781c807b5aa798bd463779988fa1d5"}, + {file = "mypy-1.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:ef6a01e563ec6a4940784c574d33f6ac1943864634517984471642908b30b6f7"}, + {file = "mypy-1.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d64c28e03ce40d5303450f547e07418c64c241669ab20610f273c9e6290b4b0b"}, + {file = "mypy-1.1.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:64cc3afb3e9e71a79d06e3ed24bb508a6d66f782aff7e56f628bf35ba2e0ba51"}, + {file = "mypy-1.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce61663faf7a8e5ec6f456857bfbcec2901fbdb3ad958b778403f63b9e606a1b"}, + {file = "mypy-1.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2b0c373d071593deefbcdd87ec8db91ea13bd8f1328d44947e88beae21e8d5e9"}, + {file = "mypy-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:2888ce4fe5aae5a673386fa232473014056967f3904f5abfcf6367b5af1f612a"}, + {file = "mypy-1.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:19ba15f9627a5723e522d007fe708007bae52b93faab00f95d72f03e1afa9598"}, + {file = "mypy-1.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:59bbd71e5c58eed2e992ce6523180e03c221dcd92b52f0e792f291d67b15a71c"}, + {file = "mypy-1.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9401e33814cec6aec8c03a9548e9385e0e228fc1b8b0a37b9ea21038e64cdd8a"}, + {file = "mypy-1.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4b398d8b1f4fba0e3c6463e02f8ad3346f71956b92287af22c9b12c3ec965a9f"}, + {file = "mypy-1.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:69b35d1dcb5707382810765ed34da9db47e7f95b3528334a3c999b0c90fe523f"}, + {file = "mypy-1.1.1-py3-none-any.whl", hash = "sha256:4e4e8b362cdf99ba00c2b218036002bdcdf1e0de085cdb296a49df03fb31dfc4"}, + {file = "mypy-1.1.1.tar.gz", hash = "sha256:ae9ceae0f5b9059f33dbc62dea087e942c0ccab4b7a003719cb70f9b8abfa32f"}, ] mypy-extensions = [ - {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, - {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = 
"sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] mysql-connector-python = [ {file = "mysql-connector-python-8.0.32.tar.gz", hash = "sha256:c2d20b29fd096a0633f9360c275bd2434d4bcf597281991c4b7f1c820cd07b84"}, @@ -3029,36 +3069,36 @@ packaging = [ {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, ] pathspec = [ - {file = "pathspec-0.10.1-py3-none-any.whl", hash = "sha256:46846318467efc4556ccfd27816e004270a9eeeeb4d062ce5e6fc7a87c573f93"}, - {file = "pathspec-0.10.1.tar.gz", hash = "sha256:7ace6161b621d31e7902eb6b5ae148d12cfd23f4a249b9ffb6b9fee12084323d"}, + {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"}, + {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"}, ] pbr = [ - {file = "pbr-5.10.0-py2.py3-none-any.whl", hash = "sha256:da3e18aac0a3c003e9eea1a81bd23e5a3a75d745670dcf736317b7d966887fdf"}, - {file = "pbr-5.10.0.tar.gz", hash = "sha256:cfcc4ff8e698256fc17ea3ff796478b050852585aa5bae79ecd05b2ab7b39b9a"}, + {file = "pbr-5.11.1-py2.py3-none-any.whl", hash = "sha256:567f09558bae2b3ab53cb3c1e2e33e726ff3338e7bae3db5dc954b3a44eef12b"}, + {file = "pbr-5.11.1.tar.gz", hash = "sha256:aefc51675b0b533d56bb5fd1c8c6c0522fe31896679882e1c4c63d5e4a0fccb3"}, ] pep8-naming = [ {file = "pep8-naming-0.13.2.tar.gz", hash = "sha256:93eef62f525fd12a6f8c98f4dcc17fa70baae2f37fa1f73bec00e3e44392fa48"}, {file = "pep8_naming-0.13.2-py3-none-any.whl", hash = "sha256:59e29e55c478db69cffbe14ab24b5bd2cd615c0413edf790d47d3fb7ba9a4e23"}, ] platformdirs = [ - {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, - {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, + {file = "platformdirs-3.2.0-py3-none-any.whl", hash = "sha256:ebe11c0d7a805086e99506aa331612429a72ca7cd52a1f0d277dc4adc20cb10e"}, + {file = "platformdirs-3.2.0.tar.gz", hash = "sha256:d5b638ca397f25f979350ff789db335903d7ea010ab28903f57b27e1b16c2b08"}, ] pluggy = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, ] pre-commit = [ - {file = "pre_commit-2.20.0-py2.py3-none-any.whl", hash = "sha256:51a5ba7c480ae8072ecdb6933df22d2f812dc897d5fe848778116129a681aac7"}, - {file = "pre_commit-2.20.0.tar.gz", hash = "sha256:a978dac7bc9ec0bcee55c18a277d553b0f419d259dadb4b9418ff2d00eb43959"}, + {file = "pre_commit-2.21.0-py2.py3-none-any.whl", hash = "sha256:e2f91727039fc39a92f58a588a25b87f936de6567eed4f0e673e0507edc75bad"}, + {file = "pre_commit-2.21.0.tar.gz", hash = "sha256:31ef31af7e474a8d8995027fefdfcf509b5c913ff31f2015b4ec4beb26a6f658"}, ] pre-commit-hooks = [ - {file = "pre_commit_hooks-4.3.0-py2.py3-none-any.whl", hash = "sha256:9ccaf7c98794659d345080ee1ea0256a55ae059675045eebdbbc17c0be8c7e4b"}, - {file = "pre_commit_hooks-4.3.0.tar.gz", hash = "sha256:fda598a4c834d030727e6a615722718b47510f4bed72df4c949f95ba9f5aaf88"}, + {file = "pre_commit_hooks-4.4.0-py2.py3-none-any.whl", hash = "sha256:fc8837335476221ccccda3d176ed6ae29fe58753ce7e8b7863f5d0f987328fc6"}, + {file = 
"pre_commit_hooks-4.4.0.tar.gz", hash = "sha256:7011eed8e1a25cde94693da009cba76392194cecc2f3f06c51a44ea6ad6c2af9"}, ] prompt-toolkit = [ - {file = "prompt_toolkit-3.0.31-py3-none-any.whl", hash = "sha256:9696f386133df0fc8ca5af4895afe5d78f5fcfe5258111c2a79a1c3e41ffa96d"}, - {file = "prompt_toolkit-3.0.31.tar.gz", hash = "sha256:9ada952c9d1787f52ff6d5f3484d0b4df8952787c087edf6a1f7c2cb1ea88148"}, + {file = "prompt_toolkit-3.0.38-py3-none-any.whl", hash = "sha256:45ea77a2f7c60418850331366c81cf6b5b9cf4c7fd34616f733c5427e6abbb1f"}, + {file = "prompt_toolkit-3.0.38.tar.gz", hash = "sha256:23ac5d50538a9a38c8bde05fecb47d0b403ecd0662857a86f886f798563d5b9b"}, ] protobuf = [ {file = "protobuf-3.20.3-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99"}, @@ -3085,17 +3125,19 @@ protobuf = [ {file = "protobuf-3.20.3.tar.gz", hash = "sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2"}, ] psycopg2 = [ - {file = "psycopg2-2.9.4-cp310-cp310-win32.whl", hash = "sha256:8de6a9fc5f42fa52f559e65120dcd7502394692490c98fed1221acf0819d7797"}, - {file = "psycopg2-2.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:1da77c061bdaab450581458932ae5e469cc6e36e0d62f988376e9f513f11cb5c"}, - {file = "psycopg2-2.9.4-cp36-cp36m-win32.whl", hash = "sha256:a11946bad3557ca254f17357d5a4ed63bdca45163e7a7d2bfb8e695df069cc3a"}, - {file = "psycopg2-2.9.4-cp36-cp36m-win_amd64.whl", hash = "sha256:46361c054df612c3cc813fdb343733d56543fb93565cff0f8ace422e4da06acb"}, - {file = "psycopg2-2.9.4-cp37-cp37m-win32.whl", hash = "sha256:aafa96f2da0071d6dd0cbb7633406d99f414b40ab0f918c9d9af7df928a1accb"}, - {file = "psycopg2-2.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:aa184d551a767ad25df3b8d22a0a62ef2962e0e374c04f6cbd1204947f540d61"}, - {file = "psycopg2-2.9.4-cp38-cp38-win32.whl", hash = "sha256:839f9ea8f6098e39966d97fcb8d08548fbc57c523a1e27a1f0609addf40f777c"}, - {file = "psycopg2-2.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:c7fa041b4acb913f6968fce10169105af5200f296028251d817ab37847c30184"}, - {file = "psycopg2-2.9.4-cp39-cp39-win32.whl", hash = "sha256:07b90a24d5056687781ddaef0ea172fd951f2f7293f6ffdd03d4f5077801f426"}, - {file = "psycopg2-2.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:849bd868ae3369932127f0771c08d1109b254f08d48dc42493c3d1b87cb2d308"}, - {file = "psycopg2-2.9.4.tar.gz", hash = "sha256:d529926254e093a1b669f692a3aa50069bc71faf5b0ecd91686a78f62767d52f"}, + {file = "psycopg2-2.9.5-cp310-cp310-win32.whl", hash = "sha256:d3ef67e630b0de0779c42912fe2cbae3805ebaba30cda27fea2a3de650a9414f"}, + {file = "psycopg2-2.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:4cb9936316d88bfab614666eb9e32995e794ed0f8f6b3b718666c22819c1d7ee"}, + {file = "psycopg2-2.9.5-cp311-cp311-win32.whl", hash = "sha256:093e3894d2d3c592ab0945d9eba9d139c139664dcf83a1c440b8a7aa9bb21955"}, + {file = "psycopg2-2.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:920bf418000dd17669d2904472efeab2b20546efd0548139618f8fa305d1d7ad"}, + {file = "psycopg2-2.9.5-cp36-cp36m-win32.whl", hash = "sha256:b9ac1b0d8ecc49e05e4e182694f418d27f3aedcfca854ebd6c05bb1cffa10d6d"}, + {file = "psycopg2-2.9.5-cp36-cp36m-win_amd64.whl", hash = "sha256:fc04dd5189b90d825509caa510f20d1d504761e78b8dfb95a0ede180f71d50e5"}, + {file = "psycopg2-2.9.5-cp37-cp37m-win32.whl", hash = "sha256:922cc5f0b98a5f2b1ff481f5551b95cd04580fd6f0c72d9b22e6c0145a4840e0"}, + {file = "psycopg2-2.9.5-cp37-cp37m-win_amd64.whl", hash = "sha256:1e5a38aa85bd660c53947bd28aeaafb6a97d70423606f1ccb044a03a1203fe4a"}, + {file = 
"psycopg2-2.9.5-cp38-cp38-win32.whl", hash = "sha256:f5b6320dbc3cf6cfb9f25308286f9f7ab464e65cfb105b64cc9c52831748ced2"}, + {file = "psycopg2-2.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:1a5c7d7d577e0eabfcf15eb87d1e19314c8c4f0e722a301f98e0e3a65e238b4e"}, + {file = "psycopg2-2.9.5-cp39-cp39-win32.whl", hash = "sha256:322fd5fca0b1113677089d4ebd5222c964b1760e361f151cbb2706c4912112c5"}, + {file = "psycopg2-2.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:190d51e8c1b25a47484e52a79638a8182451d6f6dff99f26ad9bd81e5359a0fa"}, + {file = "psycopg2-2.9.5.tar.gz", hash = "sha256:a5246d2e683a972e2187a8714b5c2cf8156c064629f9a9b1a873c1730d9e245a"}, ] pycodestyle = [ {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, @@ -3141,55 +3183,61 @@ pycryptodome = [ {file = "pycryptodome-3.17.tar.gz", hash = "sha256:bce2e2d8e82fcf972005652371a3e8731956a0c1fbb719cc897943b3695ad91b"}, ] pydocstyle = [ - {file = "pydocstyle-6.1.1-py3-none-any.whl", hash = "sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"}, - {file = "pydocstyle-6.1.1.tar.gz", hash = "sha256:1d41b7c459ba0ee6c345f2eb9ae827cab14a7533a88c5c6f7e94923f72df92dc"}, + {file = "pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"}, + {file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"}, ] pyflakes = [ {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, ] -Pygments = [ - {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, - {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, +pygments = [ + {file = "Pygments-2.14.0-py3-none-any.whl", hash = "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717"}, + {file = "Pygments-2.14.0.tar.gz", hash = "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297"}, ] pyjwt = [ {file = "PyJWT-2.6.0-py3-none-any.whl", hash = "sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14"}, {file = "PyJWT-2.6.0.tar.gz", hash = "sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd"}, ] pylint = [ - {file = "pylint-2.15.10-py3-none-any.whl", hash = "sha256:9df0d07e8948a1c3ffa3b6e2d7e6e63d9fb457c5da5b961ed63106594780cc7e"}, - {file = "pylint-2.15.10.tar.gz", hash = "sha256:b3dc5ef7d33858f297ac0d06cc73862f01e4f2e74025ec3eff347ce0bc60baf5"}, + {file = "pylint-2.17.1-py3-none-any.whl", hash = "sha256:8660a54e3f696243d644fca98f79013a959c03f979992c1ab59c24d3f4ec2700"}, + {file = "pylint-2.17.1.tar.gz", hash = "sha256:d4d009b0116e16845533bc2163493d6681846ac725eab8ca8014afb520178ddd"}, ] pyparsing = [ {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, ] pyrsistent = [ - {file = "pyrsistent-0.18.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1"}, - {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26"}, - {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ed6784ceac462a7d6fcb7e9b663e93b9a6fb373b7f43594f9ff68875788e01e"}, - {file = "pyrsistent-0.18.1-cp310-cp310-win32.whl", hash = "sha256:e4f3149fd5eb9b285d6bfb54d2e5173f6a116fe19172686797c056672689daf6"}, - {file = "pyrsistent-0.18.1-cp310-cp310-win_amd64.whl", hash = "sha256:636ce2dc235046ccd3d8c56a7ad54e99d5c1cd0ef07d9ae847306c91d11b5fec"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e92a52c166426efbe0d1ec1332ee9119b6d32fc1f0bbfd55d5c1088070e7fc1b"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7a096646eab884bf8bed965bad63ea327e0d0c38989fc83c5ea7b8a87037bfc"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cdfd2c361b8a8e5d9499b9082b501c452ade8bbf42aef97ea04854f4a3f43b22"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-win32.whl", hash = "sha256:7ec335fc998faa4febe75cc5268a9eac0478b3f681602c1f27befaf2a1abe1d8"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6455fc599df93d1f60e1c5c4fe471499f08d190d57eca040c0ea182301321286"}, - {file = "pyrsistent-0.18.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fd8da6d0124efa2f67d86fa70c851022f87c98e205f0594e1fae044e7119a5a6"}, - {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bfe2388663fd18bd8ce7db2c91c7400bf3e1a9e8bd7d63bf7e77d39051b85ec"}, - {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e3e1fcc45199df76053026a51cc59ab2ea3fc7c094c6627e93b7b44cdae2c8c"}, - {file = "pyrsistent-0.18.1-cp38-cp38-win32.whl", hash = "sha256:b568f35ad53a7b07ed9b1b2bae09eb15cdd671a5ba5d2c66caee40dbf91c68ca"}, - {file = "pyrsistent-0.18.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1b96547410f76078eaf66d282ddca2e4baae8964364abb4f4dcdde855cd123a"}, - {file = "pyrsistent-0.18.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f87cc2863ef33c709e237d4b5f4502a62a00fab450c9e020892e8e2ede5847f5"}, - {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bc66318fb7ee012071b2792024564973ecc80e9522842eb4e17743604b5e045"}, - {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:914474c9f1d93080338ace89cb2acee74f4f666fb0424896fcfb8d86058bf17c"}, - {file = "pyrsistent-0.18.1-cp39-cp39-win32.whl", hash = "sha256:1b34eedd6812bf4d33814fca1b66005805d3640ce53140ab8bbb1e2651b0d9bc"}, - {file = "pyrsistent-0.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:e24a828f57e0c337c8d8bb9f6b12f09dfdf0273da25fda9e314f0b684b415a07"}, - {file = "pyrsistent-0.18.1.tar.gz", hash = "sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96"}, + {file = "pyrsistent-0.19.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:20460ac0ea439a3e79caa1dbd560344b64ed75e85d8703943e0b66c2a6150e4a"}, + {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c18264cb84b5e68e7085a43723f9e4c1fd1d935ab240ce02c0324a8e01ccb64"}, + {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:4b774f9288dda8d425adb6544e5903f1fb6c273ab3128a355c6b972b7df39dcf"}, + {file = "pyrsistent-0.19.3-cp310-cp310-win32.whl", hash = "sha256:5a474fb80f5e0d6c9394d8db0fc19e90fa540b82ee52dba7d246a7791712f74a"}, + {file = "pyrsistent-0.19.3-cp310-cp310-win_amd64.whl", hash = "sha256:49c32f216c17148695ca0e02a5c521e28a4ee6c5089f97e34fe24163113722da"}, + {file = "pyrsistent-0.19.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f0774bf48631f3a20471dd7c5989657b639fd2d285b861237ea9e82c36a415a9"}, + {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ab2204234c0ecd8b9368dbd6a53e83c3d4f3cab10ecaf6d0e772f456c442393"}, + {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e42296a09e83028b3476f7073fcb69ffebac0e66dbbfd1bd847d61f74db30f19"}, + {file = "pyrsistent-0.19.3-cp311-cp311-win32.whl", hash = "sha256:64220c429e42a7150f4bfd280f6f4bb2850f95956bde93c6fda1b70507af6ef3"}, + {file = "pyrsistent-0.19.3-cp311-cp311-win_amd64.whl", hash = "sha256:016ad1afadf318eb7911baa24b049909f7f3bb2c5b1ed7b6a8f21db21ea3faa8"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4db1bd596fefd66b296a3d5d943c94f4fac5bcd13e99bffe2ba6a759d959a28"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aeda827381f5e5d65cced3024126529ddc4289d944f75e090572c77ceb19adbf"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42ac0b2f44607eb92ae88609eda931a4f0dfa03038c44c772e07f43e738bcac9"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-win32.whl", hash = "sha256:e8f2b814a3dc6225964fa03d8582c6e0b6650d68a232df41e3cc1b66a5d2f8d1"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c9bb60a40a0ab9aba40a59f68214eed5a29c6274c83b2cc206a359c4a89fa41b"}, + {file = "pyrsistent-0.19.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a2471f3f8693101975b1ff85ffd19bb7ca7dd7c38f8a81701f67d6b4f97b87d8"}, + {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc5d149f31706762c1f8bda2e8c4f8fead6e80312e3692619a75301d3dbb819a"}, + {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3311cb4237a341aa52ab8448c27e3a9931e2ee09561ad150ba94e4cfd3fc888c"}, + {file = "pyrsistent-0.19.3-cp38-cp38-win32.whl", hash = "sha256:f0e7c4b2f77593871e918be000b96c8107da48444d57005b6a6bc61fb4331b2c"}, + {file = "pyrsistent-0.19.3-cp38-cp38-win_amd64.whl", hash = "sha256:c147257a92374fde8498491f53ffa8f4822cd70c0d85037e09028e478cababb7"}, + {file = "pyrsistent-0.19.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b735e538f74ec31378f5a1e3886a26d2ca6351106b4dfde376a26fc32a044edc"}, + {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99abb85579e2165bd8522f0c0138864da97847875ecbd45f3e7e2af569bfc6f2"}, + {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a8cb235fa6d3fd7aae6a4f1429bbb1fec1577d978098da1252f0489937786f3"}, + {file = "pyrsistent-0.19.3-cp39-cp39-win32.whl", hash = "sha256:c74bed51f9b41c48366a286395c67f4e894374306b197e62810e0fdaf2364da2"}, + {file = "pyrsistent-0.19.3-cp39-cp39-win_amd64.whl", hash = "sha256:878433581fc23e906d947a6814336eee031a00e6defba224234169ae3d3d6a98"}, 
+ {file = "pyrsistent-0.19.3-py3-none-any.whl", hash = "sha256:ccf0d6bd208f8111179f0c26fdf84ed7c3891982f2edaeae7422575f47e66b64"}, + {file = "pyrsistent-0.19.3.tar.gz", hash = "sha256:1a2994773706bbb4995c31a97bc94f1418314923bd1048c6d964837040376440"}, ] pytest = [ - {file = "pytest-7.2.0-py3-none-any.whl", hash = "sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71"}, - {file = "pytest-7.2.0.tar.gz", hash = "sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59"}, + {file = "pytest-7.2.2-py3-none-any.whl", hash = "sha256:130328f552dcfac0b1cec75c12e3f005619dc5f874f0a06e8ff7263f0ee6225e"}, + {file = "pytest-7.2.2.tar.gz", hash = "sha256:c99ab0c73aceb050f68929bc93af19ab6db0558791c6a0715723abe9d0ade9d4"}, ] pytest-flask = [ {file = "pytest-flask-1.2.0.tar.gz", hash = "sha256:46fde652f77777bf02dc91205aec4ce20cdf2acbbbd66a918ab91f5c14693d3d"}, @@ -3216,10 +3264,10 @@ pytz-deprecation-shim = [ {file = "pytz_deprecation_shim-0.1.0.post0.tar.gz", hash = "sha256:af097bae1b616dde5c5744441e2ddc69e74dfdcb0c263129610d85b87445a59d"}, ] pyupgrade = [ - {file = "pyupgrade-3.1.0-py2.py3-none-any.whl", hash = "sha256:77c6101a710be3e24804891e43388cedbee617258e93b09c8c5e58de08617758"}, - {file = "pyupgrade-3.1.0.tar.gz", hash = "sha256:7a8d393d85e15e0e2753e90b7b2e173b9d29dfd71e61f93d93e985b242627ed3"}, + {file = "pyupgrade-3.3.1-py2.py3-none-any.whl", hash = "sha256:3b93641963df022d605c78aeae4b5956a5296ea24701eafaef9c487527b77e60"}, + {file = "pyupgrade-3.3.1.tar.gz", hash = "sha256:f88bce38b0ba92c2a9a5063c8629e456e8d919b67d2d42c7ecab82ff196f9813"}, ] -PyYAML = [ +pyyaml = [ {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, @@ -3262,88 +3310,74 @@ PyYAML = [ {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, ] regex = [ - {file = "regex-2022.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ab69b4fe09e296261377d209068d52402fb85ef89dc78a9ac4a29a895f4e24a7"}, - {file = "regex-2022.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5bc5f921be39ccb65fdda741e04b2555917a4bced24b4df14eddc7569be3b493"}, - {file = "regex-2022.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43eba5c46208deedec833663201752e865feddc840433285fbadee07b84b464d"}, - {file = "regex-2022.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c68d2c04f7701a418ec2e5631b7f3552efc32f6bcc1739369c6eeb1af55f62e0"}, - {file = "regex-2022.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:caa2734ada16a44ae57b229d45091f06e30a9a52ace76d7574546ab23008c635"}, - {file = "regex-2022.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef806f684f17dbd6263d72a54ad4073af42b42effa3eb42b877e750c24c76f86"}, - {file = "regex-2022.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be319f4eb400ee567b722e9ea63d5b2bb31464e3cf1b016502e3ee2de4f86f5c"}, - {file = "regex-2022.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:42bb37e2b2d25d958c25903f6125a41aaaa1ed49ca62c103331f24b8a459142f"}, - {file = "regex-2022.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:fbc88d3ba402b5d041d204ec2449c4078898f89c4a6e6f0ed1c1a510ef1e221d"}, - {file = "regex-2022.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:91e0f7e7be77250b808a5f46d90bf0032527d3c032b2131b63dee54753a4d729"}, - {file = "regex-2022.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:cb3652bbe6720786b9137862205986f3ae54a09dec8499a995ed58292bdf77c2"}, - {file = "regex-2022.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:878c626cbca3b649e14e972c14539a01191d79e58934e3f3ef4a9e17f90277f8"}, - {file = "regex-2022.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6df070a986fc064d865c381aecf0aaff914178fdf6874da2f2387e82d93cc5bd"}, - {file = "regex-2022.3.2-cp310-cp310-win32.whl", hash = "sha256:b549d851f91a4efb3e65498bd4249b1447ab6035a9972f7fc215eb1f59328834"}, - {file = "regex-2022.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:8babb2b5751105dc0aef2a2e539f4ba391e738c62038d8cb331c710f6b0f3da7"}, - {file = "regex-2022.3.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:1977bb64264815d3ef016625adc9df90e6d0e27e76260280c63eca993e3f455f"}, - {file = "regex-2022.3.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e73652057473ad3e6934944af090852a02590c349357b79182c1b681da2c772"}, - {file = "regex-2022.3.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b22ff939a8856a44f4822da38ef4868bd3a9ade22bb6d9062b36957c850e404f"}, - {file = "regex-2022.3.2-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:878f5d649ba1db9f52cc4ef491f7dba2d061cdc48dd444c54260eebc0b1729b9"}, - {file = "regex-2022.3.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0008650041531d0eadecc96a73d37c2dc4821cf51b0766e374cb4f1ddc4e1c14"}, - {file = "regex-2022.3.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:06b1df01cf2aef3a9790858af524ae2588762c8a90e784ba00d003f045306204"}, - {file = "regex-2022.3.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:57484d39447f94967e83e56db1b1108c68918c44ab519b8ecfc34b790ca52bf7"}, - {file = "regex-2022.3.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:74d86e8924835f863c34e646392ef39039405f6ce52956d8af16497af4064a30"}, - {file = "regex-2022.3.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:ae17fc8103f3b63345709d3e9654a274eee1c6072592aec32b026efd401931d0"}, - {file = "regex-2022.3.2-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:5f92a7cdc6a0ae2abd184e8dfd6ef2279989d24c85d2c85d0423206284103ede"}, - {file = "regex-2022.3.2-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:5dcc4168536c8f68654f014a3db49b6b4a26b226f735708be2054314ed4964f4"}, - {file = "regex-2022.3.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:1e30762ddddb22f7f14c4f59c34d3addabc789216d813b0f3e2788d7bcf0cf29"}, - {file = "regex-2022.3.2-cp36-cp36m-win32.whl", hash = "sha256:286ff9ec2709d56ae7517040be0d6c502642517ce9937ab6d89b1e7d0904f863"}, - {file = "regex-2022.3.2-cp36-cp36m-win_amd64.whl", hash = "sha256:d326ff80ed531bf2507cba93011c30fff2dd51454c85f55df0f59f2030b1687b"}, - {file = "regex-2022.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9d828c5987d543d052b53c579a01a52d96b86f937b1777bbfe11ef2728929357"}, - {file = "regex-2022.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:c87ac58b9baaf50b6c1b81a18d20eda7e2883aa9a4fb4f1ca70f2e443bfcdc57"}, - {file = "regex-2022.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d6c2441538e4fadd4291c8420853431a229fcbefc1bf521810fbc2629d8ae8c2"}, - {file = "regex-2022.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f3356afbb301ec34a500b8ba8b47cba0b44ed4641c306e1dd981a08b416170b5"}, - {file = "regex-2022.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d96eec8550fd2fd26f8e675f6d8b61b159482ad8ffa26991b894ed5ee19038b"}, - {file = "regex-2022.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf668f26604e9f7aee9f8eaae4ca07a948168af90b96be97a4b7fa902a6d2ac1"}, - {file = "regex-2022.3.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0eb0e2845e81bdea92b8281a3969632686502565abf4a0b9e4ab1471c863d8f3"}, - {file = "regex-2022.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:87bc01226cd288f0bd9a4f9f07bf6827134dc97a96c22e2d28628e824c8de231"}, - {file = "regex-2022.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:09b4b6ccc61d4119342b26246ddd5a04accdeebe36bdfe865ad87a0784efd77f"}, - {file = "regex-2022.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:9557545c10d52c845f270b665b52a6a972884725aa5cf12777374e18f2ea8960"}, - {file = "regex-2022.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:0be0c34a39e5d04a62fd5342f0886d0e57592a4f4993b3f9d257c1f688b19737"}, - {file = "regex-2022.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7b103dffb9f6a47ed7ffdf352b78cfe058b1777617371226c1894e1be443afec"}, - {file = "regex-2022.3.2-cp37-cp37m-win32.whl", hash = "sha256:f8169ec628880bdbca67082a9196e2106060a4a5cbd486ac51881a4df805a36f"}, - {file = "regex-2022.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:4b9c16a807b17b17c4fa3a1d8c242467237be67ba92ad24ff51425329e7ae3d0"}, - {file = "regex-2022.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:67250b36edfa714ba62dc62d3f238e86db1065fccb538278804790f578253640"}, - {file = "regex-2022.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5510932596a0f33399b7fff1bd61c59c977f2b8ee987b36539ba97eb3513584a"}, - {file = "regex-2022.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6f7ee2289176cb1d2c59a24f50900f8b9580259fa9f1a739432242e7d254f93"}, - {file = "regex-2022.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86d7a68fa53688e1f612c3246044157117403c7ce19ebab7d02daf45bd63913e"}, - {file = "regex-2022.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaf5317c961d93c1a200b9370fb1c6b6836cc7144fef3e5a951326912bf1f5a3"}, - {file = "regex-2022.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad397bc7d51d69cb07ef89e44243f971a04ce1dca9bf24c992c362406c0c6573"}, - {file = "regex-2022.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:297c42ede2c81f0cb6f34ea60b5cf6dc965d97fa6936c11fc3286019231f0d66"}, - {file = "regex-2022.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:af4d8cc28e4c7a2f6a9fed544228c567340f8258b6d7ea815b62a72817bbd178"}, - {file = "regex-2022.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:452519bc4c973e961b1620c815ea6dd8944a12d68e71002be5a7aff0a8361571"}, - {file = 
"regex-2022.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cb34c2d66355fb70ae47b5595aafd7218e59bb9c00ad8cc3abd1406ca5874f07"}, - {file = "regex-2022.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d146e5591cb67c5e836229a04723a30af795ef9b70a0bbd913572e14b7b940f"}, - {file = "regex-2022.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:03299b0bcaa7824eb7c0ebd7ef1e3663302d1b533653bfe9dc7e595d453e2ae9"}, - {file = "regex-2022.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9ccb0a4ab926016867260c24c192d9df9586e834f5db83dfa2c8fffb3a6e5056"}, - {file = "regex-2022.3.2-cp38-cp38-win32.whl", hash = "sha256:f7e8f1ee28e0a05831c92dc1c0c1c94af5289963b7cf09eca5b5e3ce4f8c91b0"}, - {file = "regex-2022.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:35ed2f3c918a00b109157428abfc4e8d1ffabc37c8f9abc5939ebd1e95dabc47"}, - {file = "regex-2022.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:55820bc631684172b9b56a991d217ec7c2e580d956591dc2144985113980f5a3"}, - {file = "regex-2022.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:83f03f0bd88c12e63ca2d024adeee75234d69808b341e88343b0232329e1f1a1"}, - {file = "regex-2022.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42d6007722d46bd2c95cce700181570b56edc0dcbadbfe7855ec26c3f2d7e008"}, - {file = "regex-2022.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:320c2f4106962ecea0f33d8d31b985d3c185757c49c1fb735501515f963715ed"}, - {file = "regex-2022.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbd3fe37353c62fd0eb19fb76f78aa693716262bcd5f9c14bb9e5aca4b3f0dc4"}, - {file = "regex-2022.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17e51ad1e6131c496b58d317bc9abec71f44eb1957d32629d06013a21bc99cac"}, - {file = "regex-2022.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72bc3a5effa5974be6d965ed8301ac1e869bc18425c8a8fac179fbe7876e3aee"}, - {file = "regex-2022.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e5602a9b5074dcacc113bba4d2f011d2748f50e3201c8139ac5b68cf2a76bd8b"}, - {file = "regex-2022.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:729aa8ca624c42f309397c5fc9e21db90bf7e2fdd872461aabdbada33de9063c"}, - {file = "regex-2022.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d6ecfd1970b3380a569d7b3ecc5dd70dba295897418ed9e31ec3c16a5ab099a5"}, - {file = "regex-2022.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:13bbf0c9453c6d16e5867bda7f6c0c7cff1decf96c5498318bb87f8136d2abd4"}, - {file = "regex-2022.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:58ba41e462653eaf68fc4a84ec4d350b26a98d030be1ab24aba1adcc78ffe447"}, - {file = "regex-2022.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c0446b2871335d5a5e9fcf1462f954586b09a845832263db95059dcd01442015"}, - {file = "regex-2022.3.2-cp39-cp39-win32.whl", hash = "sha256:20e6a27959f162f979165e496add0d7d56d7038237092d1aba20b46de79158f1"}, - {file = "regex-2022.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:9efa41d1527b366c88f265a227b20bcec65bda879962e3fc8a2aee11e81266d7"}, - {file = "regex-2022.3.2.tar.gz", hash = "sha256:79e5af1ff258bc0fe0bdd6f69bc4ae33935a898e3cbefbbccf22e88a27fa053b"}, + {file = "regex-2023.3.23-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:845a5e2d84389c4ddada1a9b95c055320070f18bb76512608374aca00d22eca8"}, + {file = "regex-2023.3.23-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:87d9951f5a538dd1d016bdc0dcae59241d15fa94860964833a54d18197fcd134"}, + {file = "regex-2023.3.23-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37ae17d3be44c0b3f782c28ae9edd8b47c1f1776d4cabe87edc0b98e1f12b021"}, + {file = "regex-2023.3.23-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0b8eb1e3bca6b48dc721818a60ae83b8264d4089a4a41d62be6d05316ec38e15"}, + {file = "regex-2023.3.23-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df45fac182ebc3c494460c644e853515cc24f5ad9da05f8ffb91da891bfee879"}, + {file = "regex-2023.3.23-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7006105b10b59971d3b248ad75acc3651c7e4cf54d81694df5a5130a3c3f7ea"}, + {file = "regex-2023.3.23-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93f3f1aa608380fe294aa4cb82e2afda07a7598e828d0341e124b8fd9327c715"}, + {file = "regex-2023.3.23-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:787954f541ab95d8195d97b0b8cf1dc304424adb1e07365967e656b92b38a699"}, + {file = "regex-2023.3.23-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:20abe0bdf03630fe92ccafc45a599bca8b3501f48d1de4f7d121153350a2f77d"}, + {file = "regex-2023.3.23-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:11d00c31aeab9a6e0503bc77e73ed9f4527b3984279d997eb145d7c7be6268fd"}, + {file = "regex-2023.3.23-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:d5bbe0e1511b844794a3be43d6c145001626ba9a6c1db8f84bdc724e91131d9d"}, + {file = "regex-2023.3.23-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ea3c0cb56eadbf4ab2277e7a095676370b3e46dbfc74d5c383bd87b0d6317910"}, + {file = "regex-2023.3.23-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d895b4c863059a4934d3e874b90998df774644a41b349ebb330f85f11b4ef2c0"}, + {file = "regex-2023.3.23-cp310-cp310-win32.whl", hash = "sha256:9d764514d19b4edcc75fd8cb1423448ef393e8b6cbd94f38cab983ab1b75855d"}, + {file = "regex-2023.3.23-cp310-cp310-win_amd64.whl", hash = "sha256:11d1f2b7a0696dc0310de0efb51b1f4d813ad4401fe368e83c0c62f344429f98"}, + {file = "regex-2023.3.23-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8a9c63cde0eaa345795c0fdeb19dc62d22e378c50b0bc67bf4667cd5b482d98b"}, + {file = "regex-2023.3.23-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dd7200b4c27b68cf9c9646da01647141c6db09f48cc5b51bc588deaf8e98a797"}, + {file = "regex-2023.3.23-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22720024b90a6ba673a725dcc62e10fb1111b889305d7c6b887ac7466b74bedb"}, + {file = "regex-2023.3.23-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b190a339090e6af25f4a5fd9e77591f6d911cc7b96ecbb2114890b061be0ac1"}, + {file = "regex-2023.3.23-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e76b6fc0d8e9efa39100369a9b3379ce35e20f6c75365653cf58d282ad290f6f"}, + {file = "regex-2023.3.23-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7868b8f218bf69a2a15402fde08b08712213a1f4b85a156d90473a6fb6b12b09"}, + {file = "regex-2023.3.23-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2472428efc4127374f494e570e36b30bb5e6b37d9a754f7667f7073e43b0abdd"}, + {file = "regex-2023.3.23-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c37df2a060cb476d94c047b18572ee2b37c31f831df126c0da3cd9227b39253d"}, + {file = 
"regex-2023.3.23-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4479f9e2abc03362df4045b1332d4a2b7885b245a30d4f4b051c4083b97d95d8"}, + {file = "regex-2023.3.23-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e2396e0678167f2d0c197da942b0b3fb48fee2f0b5915a0feb84d11b6686afe6"}, + {file = "regex-2023.3.23-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:75f288c60232a5339e0ff2fa05779a5e9c74e9fc085c81e931d4a264501e745b"}, + {file = "regex-2023.3.23-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c869260aa62cee21c5eb171a466c0572b5e809213612ef8d495268cd2e34f20d"}, + {file = "regex-2023.3.23-cp311-cp311-win32.whl", hash = "sha256:25f0532fd0c53e96bad84664171969de9673b4131f2297f1db850d3918d58858"}, + {file = "regex-2023.3.23-cp311-cp311-win_amd64.whl", hash = "sha256:5ccfafd98473e007cebf7da10c1411035b7844f0f204015efd050601906dbb53"}, + {file = "regex-2023.3.23-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6572ff287176c0fb96568adb292674b421fa762153ed074d94b1d939ed92c253"}, + {file = "regex-2023.3.23-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a610e0adfcb0fc84ea25f6ea685e39e74cbcd9245a72a9a7aab85ff755a5ed27"}, + {file = "regex-2023.3.23-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086afe222d58b88b62847bdbd92079b4699350b4acab892f88a935db5707c790"}, + {file = "regex-2023.3.23-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79e29fd62fa2f597a6754b247356bda14b866131a22444d67f907d6d341e10f3"}, + {file = "regex-2023.3.23-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c07ce8e9eee878a48ebeb32ee661b49504b85e164b05bebf25420705709fdd31"}, + {file = "regex-2023.3.23-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86b036f401895e854de9fefe061518e78d506d8a919cc250dc3416bca03f6f9a"}, + {file = "regex-2023.3.23-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78ac8dd8e18800bb1f97aad0d73f68916592dddf233b99d2b5cabc562088503a"}, + {file = "regex-2023.3.23-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:539dd010dc35af935b32f248099e38447bbffc10b59c2b542bceead2bed5c325"}, + {file = "regex-2023.3.23-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9bf4a5626f2a0ea006bf81e8963f498a57a47d58907eaa58f4b3e13be68759d8"}, + {file = "regex-2023.3.23-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf86b4328c204c3f315074a61bc1c06f8a75a8e102359f18ce99fbcbbf1951f0"}, + {file = "regex-2023.3.23-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:2848bf76673c83314068241c8d5b7fa9ad9bed866c979875a0e84039349e8fa7"}, + {file = "regex-2023.3.23-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:c125a02d22c555e68f7433bac8449992fa1cead525399f14e47c2d98f2f0e467"}, + {file = "regex-2023.3.23-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cd1671e9d5ac05ce6aa86874dd8dfa048824d1dbe73060851b310c6c1a201a96"}, + {file = "regex-2023.3.23-cp38-cp38-win32.whl", hash = "sha256:fffe57312a358be6ec6baeb43d253c36e5790e436b7bf5b7a38df360363e88e9"}, + {file = "regex-2023.3.23-cp38-cp38-win_amd64.whl", hash = "sha256:dbb3f87e15d3dd76996d604af8678316ad2d7d20faa394e92d9394dfd621fd0c"}, + {file = "regex-2023.3.23-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c88e8c226473b5549fe9616980ea7ca09289246cfbdf469241edf4741a620004"}, + {file = "regex-2023.3.23-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6560776ec19c83f3645bbc5db64a7a5816c9d8fb7ed7201c5bcd269323d88072"}, + {file = 
"regex-2023.3.23-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b1fc2632c01f42e06173d8dd9bb2e74ab9b0afa1d698058c867288d2c7a31f3"}, + {file = "regex-2023.3.23-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fdf7ad455f1916b8ea5cdbc482d379f6daf93f3867b4232d14699867a5a13af7"}, + {file = "regex-2023.3.23-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5fc33b27b1d800fc5b78d7f7d0f287e35079ecabe68e83d46930cf45690e1c8c"}, + {file = "regex-2023.3.23-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c49552dc938e3588f63f8a78c86f3c9c75301e813bca0bef13bdb4b87ccf364"}, + {file = "regex-2023.3.23-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e152461e9a0aedec7d37fc66ec0fa635eca984777d3d3c3e36f53bf3d3ceb16e"}, + {file = "regex-2023.3.23-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:db034255e72d2995cf581b14bb3fc9c00bdbe6822b49fcd4eef79e1d5f232618"}, + {file = "regex-2023.3.23-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:55ae114da21b7a790b90255ea52d2aa3a0d121a646deb2d3c6a3194e722fc762"}, + {file = "regex-2023.3.23-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ef3f528fe1cc3d139508fe1b22523745aa77b9d6cb5b0bf277f48788ee0b993f"}, + {file = "regex-2023.3.23-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:a81c9ec59ca2303acd1ccd7b9ac409f1e478e40e96f8f79b943be476c5fdb8bb"}, + {file = "regex-2023.3.23-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cde09c4fdd070772aa2596d97e942eb775a478b32459e042e1be71b739d08b77"}, + {file = "regex-2023.3.23-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3cd9f5dd7b821f141d3a6ca0d5d9359b9221e4f051ca3139320adea9f1679691"}, + {file = "regex-2023.3.23-cp39-cp39-win32.whl", hash = "sha256:7304863f3a652dab5e68e6fb1725d05ebab36ec0390676d1736e0571ebb713ef"}, + {file = "regex-2023.3.23-cp39-cp39-win_amd64.whl", hash = "sha256:54c3fa855a3f7438149de3211738dd9b5f0c733f48b54ae05aa7fce83d48d858"}, + {file = "regex-2023.3.23.tar.gz", hash = "sha256:dc80df325b43ffea5cdea2e3eaa97a44f3dd298262b1c7fe9dbb2a9522b956a7"}, ] reorder-python-imports = [ {file = "reorder_python_imports-3.9.0-py2.py3-none-any.whl", hash = "sha256:3f9c16e8781f54c944756d0d1eb34a8c863554f7a4eb3693f574fe19b1a29b56"}, {file = "reorder_python_imports-3.9.0.tar.gz", hash = "sha256:49292ed537829a6bece9fb3746fc1bbe98f52643be5de01a4e13680268a5b0ec"}, ] requests = [ - {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, - {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, + {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"}, + {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"}, ] restrictedpython = [ {file = "RestrictedPython-6.0-py3-none-any.whl", hash = "sha256:3479303f7bff48a7dedad76f96e7704993c5e86c5adbd67f607295d5352f0fb8"}, @@ -3352,7 +3386,7 @@ restrictedpython = [ restructuredtext-lint = [ {file = "restructuredtext_lint-1.4.0.tar.gz", hash = "sha256:1b235c0c922341ab6c530390892eb9e92f90b9b75046063e047cacfb0f050c45"}, ] -"ruamel.yaml" = [ +ruamel-yaml = [ {file = "ruamel.yaml-0.17.21-py3-none-any.whl", hash = "sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7"}, {file = 
"ruamel.yaml-0.17.21.tar.gz", hash = "sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af"}, ] @@ -3397,75 +3431,99 @@ safety = [ {file = "safety-2.3.5.tar.gz", hash = "sha256:a60c11f8952f412cbb165d70cb1f673a3b43a2ba9a93ce11f97e6a4de834aa3a"}, ] sentry-sdk = [ - {file = "sentry-sdk-1.16.0.tar.gz", hash = "sha256:a900845bd78c263d49695d48ce78a4bce1030bbd917e0b6cc021fc000c901113"}, - {file = "sentry_sdk-1.16.0-py2.py3-none-any.whl", hash = "sha256:633edefead34d976ff22e7edc367cdf57768e24bc714615ccae746d9d91795ae"}, + {file = "sentry-sdk-1.18.0.tar.gz", hash = "sha256:d07b9569a151033b462f7a7113ada94cc41ecf49daa83d35f5f852a0b9cf3b44"}, + {file = "sentry_sdk-1.18.0-py2.py3-none-any.whl", hash = "sha256:714203a9adcac4a4a35e348dc9d3e294ad0200a66cdca26c068967d728f34fcb"}, ] setuptools = [ - {file = "setuptools-65.5.1-py3-none-any.whl", hash = "sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31"}, - {file = "setuptools-65.5.1.tar.gz", hash = "sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f"}, + {file = "setuptools-65.7.0-py3-none-any.whl", hash = "sha256:8ab4f1dbf2b4a65f7eec5ad0c620e84c34111a68d3349833494b9088212214dd"}, + {file = "setuptools-65.7.0.tar.gz", hash = "sha256:4d3c92fac8f1118bb77a22181355e29c239cabfe2b9effdaa665c66b711136d7"}, ] simplejson = [ - {file = "simplejson-3.17.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a89acae02b2975b1f8e4974cb8cdf9bf9f6c91162fb8dec50c259ce700f2770a"}, - {file = "simplejson-3.17.6-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:82ff356ff91be0ab2293fc6d8d262451eb6ac4fd999244c4b5f863e049ba219c"}, - {file = "simplejson-3.17.6-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:0de783e9c2b87bdd75b57efa2b6260c24b94605b5c9843517577d40ee0c3cc8a"}, - {file = "simplejson-3.17.6-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:d24a9e61df7a7787b338a58abfba975414937b609eb6b18973e25f573bc0eeeb"}, - {file = "simplejson-3.17.6-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:e8603e691580487f11306ecb066c76f1f4a8b54fb3bdb23fa40643a059509366"}, - {file = "simplejson-3.17.6-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:9b01e7b00654115965a206e3015f0166674ec1e575198a62a977355597c0bef5"}, - {file = "simplejson-3.17.6-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:37bc0cf0e5599f36072077e56e248f3336917ded1d33d2688624d8ed3cefd7d2"}, - {file = "simplejson-3.17.6-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:cf6e7d5fe2aeb54898df18db1baf479863eae581cce05410f61f6b4188c8ada1"}, - {file = "simplejson-3.17.6-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:bdfc54b4468ed4cd7415928cbe782f4d782722a81aeb0f81e2ddca9932632211"}, - {file = "simplejson-3.17.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dd16302d39c4d6f4afde80edd0c97d4db643327d355a312762ccd9bd2ca515ed"}, - {file = "simplejson-3.17.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:deac4bdafa19bbb89edfb73b19f7f69a52d0b5bd3bb0c4ad404c1bbfd7b4b7fd"}, - {file = "simplejson-3.17.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8bbdb166e2fb816e43ab034c865147edafe28e1b19c72433147789ac83e2dda"}, - {file = "simplejson-3.17.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7854326920d41c3b5d468154318fe6ba4390cb2410480976787c640707e0180"}, - {file = "simplejson-3.17.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:04e31fa6ac8e326480703fb6ded1488bfa6f1d3f760d32e29dbf66d0838982ce"}, - {file = 
"simplejson-3.17.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f63600ec06982cdf480899026f4fda622776f5fabed9a869fdb32d72bc17e99a"}, - {file = "simplejson-3.17.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e03c3b8cc7883a54c3f34a6a135c4a17bc9088a33f36796acdb47162791b02f6"}, - {file = "simplejson-3.17.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a2d30d6c1652140181dc6861f564449ad71a45e4f165a6868c27d36745b65d40"}, - {file = "simplejson-3.17.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a1aa6e4cae8e3b8d5321be4f51c5ce77188faf7baa9fe1e78611f93a8eed2882"}, - {file = "simplejson-3.17.6-cp310-cp310-win32.whl", hash = "sha256:97202f939c3ff341fc3fa84d15db86156b1edc669424ba20b0a1fcd4a796a045"}, - {file = "simplejson-3.17.6-cp310-cp310-win_amd64.whl", hash = "sha256:80d3bc9944be1d73e5b1726c3bbfd2628d3d7fe2880711b1eb90b617b9b8ac70"}, - {file = "simplejson-3.17.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9fa621b3c0c05d965882c920347b6593751b7ab20d8fa81e426f1735ca1a9fc7"}, - {file = "simplejson-3.17.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd2fb11922f58df8528adfca123f6a84748ad17d066007e7ac977720063556bd"}, - {file = "simplejson-3.17.6-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:724c1fe135aa437d5126138d977004d165a3b5e2ee98fc4eb3e7c0ef645e7e27"}, - {file = "simplejson-3.17.6-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4ff4ac6ff3aa8f814ac0f50bf218a2e1a434a17aafad4f0400a57a8cc62ef17f"}, - {file = "simplejson-3.17.6-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:67093a526e42981fdd954868062e56c9b67fdd7e712616cc3265ad0c210ecb51"}, - {file = "simplejson-3.17.6-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:5d6b4af7ad7e4ac515bc6e602e7b79e2204e25dbd10ab3aa2beef3c5a9cad2c7"}, - {file = "simplejson-3.17.6-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:1c9b1ed7ed282b36571638297525f8ef80f34b3e2d600a56f962c6044f24200d"}, - {file = "simplejson-3.17.6-cp36-cp36m-win32.whl", hash = "sha256:632ecbbd2228575e6860c9e49ea3cc5423764d5aa70b92acc4e74096fb434044"}, - {file = "simplejson-3.17.6-cp36-cp36m-win_amd64.whl", hash = "sha256:4c09868ddb86bf79b1feb4e3e7e4a35cd6e61ddb3452b54e20cf296313622566"}, - {file = "simplejson-3.17.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4b6bd8144f15a491c662f06814bd8eaa54b17f26095bb775411f39bacaf66837"}, - {file = "simplejson-3.17.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5decdc78849617917c206b01e9fc1d694fd58caa961be816cb37d3150d613d9a"}, - {file = "simplejson-3.17.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:521877c7bd060470806eb6335926e27453d740ac1958eaf0d8c00911bc5e1802"}, - {file = "simplejson-3.17.6-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:65b998193bd7b0c7ecdfffbc825d808eac66279313cb67d8892bb259c9d91494"}, - {file = "simplejson-3.17.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ac786f6cb7aa10d44e9641c7a7d16d7f6e095b138795cd43503769d4154e0dc2"}, - {file = "simplejson-3.17.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3ff5b3464e1ce86a8de8c88e61d4836927d5595c2162cab22e96ff551b916e81"}, - {file = "simplejson-3.17.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:69bd56b1d257a91e763256d63606937ae4eb890b18a789b66951c00062afec33"}, - {file = "simplejson-3.17.6-cp37-cp37m-win32.whl", hash = "sha256:b81076552d34c27e5149a40187a8f7e2abb2d3185576a317aaf14aeeedad862a"}, - {file = "simplejson-3.17.6-cp37-cp37m-win_amd64.whl", hash = "sha256:07ecaafc1b1501f275bf5acdee34a4ad33c7c24ede287183ea77a02dc071e0c0"}, - {file = "simplejson-3.17.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:068670af975247acbb9fc3d5393293368cda17026db467bf7a51548ee8f17ee1"}, - {file = "simplejson-3.17.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4d1c135af0c72cb28dd259cf7ba218338f4dc027061262e46fe058b4e6a4c6a3"}, - {file = "simplejson-3.17.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:23fe704da910ff45e72543cbba152821685a889cf00fc58d5c8ee96a9bad5f94"}, - {file = "simplejson-3.17.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f444762fed1bc1fd75187ef14a20ed900c1fbb245d45be9e834b822a0223bc81"}, - {file = "simplejson-3.17.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:681eb4d37c9a9a6eb9b3245a5e89d7f7b2b9895590bb08a20aa598c1eb0a1d9d"}, - {file = "simplejson-3.17.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8e8607d8f6b4f9d46fee11447e334d6ab50e993dd4dbfb22f674616ce20907ab"}, - {file = "simplejson-3.17.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b10556817f09d46d420edd982dd0653940b90151d0576f09143a8e773459f6fe"}, - {file = "simplejson-3.17.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e1ec8a9ee0987d4524ffd6299e778c16cc35fef6d1a2764e609f90962f0b293a"}, - {file = "simplejson-3.17.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0b4126cac7d69ac06ff22efd3e0b3328a4a70624fcd6bca4fc1b4e6d9e2e12bf"}, - {file = "simplejson-3.17.6-cp38-cp38-win32.whl", hash = "sha256:35a49ebef25f1ebdef54262e54ae80904d8692367a9f208cdfbc38dbf649e00a"}, - {file = "simplejson-3.17.6-cp38-cp38-win_amd64.whl", hash = "sha256:743cd768affaa508a21499f4858c5b824ffa2e1394ed94eb85caf47ac0732198"}, - {file = "simplejson-3.17.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fb62d517a516128bacf08cb6a86ecd39fb06d08e7c4980251f5d5601d29989ba"}, - {file = "simplejson-3.17.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:12133863178a8080a3dccbf5cb2edfab0001bc41e5d6d2446af2a1131105adfe"}, - {file = "simplejson-3.17.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5540fba2d437edaf4aa4fbb80f43f42a8334206ad1ad3b27aef577fd989f20d9"}, - {file = "simplejson-3.17.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d74ee72b5071818a1a5dab47338e87f08a738cb938a3b0653b9e4d959ddd1fd9"}, - {file = "simplejson-3.17.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:28221620f4dcabdeac310846629b976e599a13f59abb21616356a85231ebd6ad"}, - {file = "simplejson-3.17.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b09bc62e5193e31d7f9876220fb429ec13a6a181a24d897b9edfbbdbcd678851"}, - {file = "simplejson-3.17.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7255a37ff50593c9b2f1afa8fafd6ef5763213c1ed5a9e2c6f5b9cc925ab979f"}, - {file = "simplejson-3.17.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:401d40969cee3df7bda211e57b903a534561b77a7ade0dd622a8d1a31eaa8ba7"}, - {file = "simplejson-3.17.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:a649d0f66029c7eb67042b15374bd93a26aae202591d9afd71e111dd0006b198"}, - {file = "simplejson-3.17.6-cp39-cp39-win32.whl", hash = "sha256:522fad7be85de57430d6d287c4b635813932946ebf41b913fe7e880d154ade2e"}, - {file = "simplejson-3.17.6-cp39-cp39-win_amd64.whl", hash = "sha256:3fe87570168b2ae018391e2b43fbf66e8593a86feccb4b0500d134c998983ccc"}, - {file = "simplejson-3.17.6.tar.gz", hash = "sha256:cf98038d2abf63a1ada5730e91e84c642ba6c225b0198c3684151b1f80c5f8a6"}, + {file = "simplejson-3.18.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:8f381747c2edebe3c750a571e55103bfcc33b2707a9b91ae033ab9ba718d976a"}, + {file = "simplejson-3.18.4-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:094275b1b8f003afce1167c8a674cd1ee2fd48c566632dac5d149901d5012ff8"}, + {file = "simplejson-3.18.4-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:676e8c182f8079851f12ae1cee2fcebe04def2da2a5703a9d747ab125af47732"}, + {file = "simplejson-3.18.4-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:4b5df4ee48403885046c6f4fd8adc84c4ac0adec69482f22a17bd4ba52876341"}, + {file = "simplejson-3.18.4-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:edb334cab35dcd90eb563fdacb085f10e5dd0b1acb57fa43f8933308b42a8f88"}, + {file = "simplejson-3.18.4-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:b6c6cfc492710d8f0303705fa1ff7bb3d6a145f523384e45a6f3b13ada37021f"}, + {file = "simplejson-3.18.4-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ced906b172bfad62736a27cfafcb6e24bc9938533b0529ff8150f7926fe35b54"}, + {file = "simplejson-3.18.4-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:7701a289d45fdfeb37f1d15cf638801cea439df667a613379443772a86e82936"}, + {file = "simplejson-3.18.4-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:e2f87a483c4ab0bb2a9adc9ca09173e7f7cf3696e4fa67bd45a6b33181e57921"}, + {file = "simplejson-3.18.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:c0444423129df448788edc66a129bc7560ad7d6a661d74f0900959c0b44349a1"}, + {file = "simplejson-3.18.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29a86bc9c8a913a4e0ffab85c563a7505cdf4bd13fba05342f8314facc0b7586"}, + {file = "simplejson-3.18.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e2fa1ee5ca34ab2ecfbe3f7a7e952a1ecaebb5b4818f002b5b146324912ac3d5"}, + {file = "simplejson-3.18.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b17026f3f349a6e87818cd3531e3bbb5cc78a6f4b2b6718f574a8e0512d71e08"}, + {file = "simplejson-3.18.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a255d30cda6334ba780eb40a56e8134efd3453948b995d3966e45212e34bf018"}, + {file = "simplejson-3.18.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d9f0dfde448611f4f818da05f9b544a78f29355dc39151b0dad8e7c65c513e4f"}, + {file = "simplejson-3.18.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1085cadec0f7e76377951d7a87744628c90ac6cc634fc97eecce0c4d41ec563"}, + {file = "simplejson-3.18.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f15f56b3119fb71fa57eb4613bcd87eb7df6c2f3547de7d341853d3e50cef97e"}, + {file = "simplejson-3.18.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:695da62e494e4689ab78fae173a78390a175b6a5ccc4292277ce0f8dba3945d5"}, + {file = "simplejson-3.18.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:097e48686e49026836ef384c7c10ca670acc023cb16a976a689c2eb6c1852df4"}, + {file = "simplejson-3.18.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:a56005332d70b8d02d476d4a85818b27b01e51dac1a21d5c1a1d8a5df2efb4a6"}, + {file = "simplejson-3.18.4-cp310-cp310-win32.whl", hash = "sha256:3d549efc7e8f9a180c59462b124991b690ff25c235d5cf495c3246c66a7679cd"}, + {file = "simplejson-3.18.4-cp310-cp310-win_amd64.whl", hash = "sha256:bd694c465cc61fa8e599355e535b6eb561279834d9883aeef08d0e86c44c300c"}, + {file = "simplejson-3.18.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad37f25fd8dfbed80815c3281b82a165be2a74e663856b9a50099d18789987bc"}, + {file = "simplejson-3.18.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2362c66d2c633925d90f2f177f05e0570d320d986130d34dff9ad6edbf7be8ac"}, + {file = "simplejson-3.18.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30e381471158290ccb79bd31e7bbda4c8f2cf7e1a5f6b557c1b97d6036ccd05b"}, + {file = "simplejson-3.18.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d45ed9452a42064805143480397b586ea2ea322f4b8b69034c51181e7f38342"}, + {file = "simplejson-3.18.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0dcc54e7cfbd9674ec4ca181e26eaa5b038446601faeaa6c83d146ddef2f2652"}, + {file = "simplejson-3.18.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:05a668d4a93816fb8a644e90e7987aa3beeb9d2112ca50a474d41e6acb5bb88a"}, + {file = "simplejson-3.18.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da6dc0cb00ef1e1a8daf285074ca8b2bb89591170c42ceab0c37bcdb9adc802c"}, + {file = "simplejson-3.18.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f31e126204ec38f92dee119af87cf881044ef7dea6f7477ef774ed3d84199c24"}, + {file = "simplejson-3.18.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:fb0f8b35c11fd8e4b924f974d331b20fa54555282451db7f2a3b24bd2d33cc11"}, + {file = "simplejson-3.18.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:2d1b47f768e1f4c1c8a9457effabed735939401e85c0ddcdf68444c88a9242e6"}, + {file = "simplejson-3.18.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6d65ea4582b47d77e9094c22eb0aeded0ebd96c1df86e988870b40c6514c6e21"}, + {file = "simplejson-3.18.4-cp311-cp311-win32.whl", hash = "sha256:32de1672f91a789cc9e1c36c406b2d75457a242d64e9e73a70b9b814ef00095e"}, + {file = "simplejson-3.18.4-cp311-cp311-win_amd64.whl", hash = "sha256:c37b092d29741096c4723f48924a80b1d3de62ca1de254ce88178fa083dd520c"}, + {file = "simplejson-3.18.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:706a7fc81ceeb321a1040d008b134056012188f95a5c31ad94fb03153b35cc84"}, + {file = "simplejson-3.18.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab64f087c5863ac621b42e227e5a43bd9b28de581afe7be12ad96562b9be8203"}, + {file = "simplejson-3.18.4-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7f27a079cb009ba569983061a50a9270b7e1d35f81e4eeaf0e26f8924027e550"}, + {file = "simplejson-3.18.4-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93ba80fbf959b5852554f23201a5f4b30885930c303546ffa883859a435ea3cf"}, + {file = "simplejson-3.18.4-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cdb5069870f7d26a34e5adc30672d0a7b26e652720530a023bb3a8d8a42e37f"}, + {file = "simplejson-3.18.4-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:340b7d085b4a5063aacb8664b1250e4a7426c16e1cc80705c548a229153af147"}, + {file = 
"simplejson-3.18.4-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:b9893852c559998f667e6434d2c2474518d4cdfd1b9cec8e57b3c9d577ba55c1"}, + {file = "simplejson-3.18.4-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:efae49d0148ec68b6e012f1b9e19bd530f4dced378ba919e3e906ae2b829cc31"}, + {file = "simplejson-3.18.4-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:a89d7fe994b115f0a792e6673f387af3db812a1760d594abad51e0ea11d3e470"}, + {file = "simplejson-3.18.4-cp36-cp36m-win32.whl", hash = "sha256:44058bea97429cfa0d6fb1d8eb0736a77022f34a326d5bc64fd6fed8d9304571"}, + {file = "simplejson-3.18.4-cp36-cp36m-win_amd64.whl", hash = "sha256:f85d87986ca375b8305b5c4f166783b8db383a6469e8b99b8dba22878388f234"}, + {file = "simplejson-3.18.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a3bba99178f1b25878752a8bc6da2f93fbae754ebd4914d2ac4b869b9fb24102"}, + {file = "simplejson-3.18.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5f67bffa6fc68e391b2250e1feb43d534ded64a7b918eb89cf7e3e679759d94"}, + {file = "simplejson-3.18.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a8ac155e3fd3b54a63040df024e57e62c130b15a2fc66eff3c2a946f42beed52"}, + {file = "simplejson-3.18.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:682b202f56d9d9e1bb22eaca3e37321002223fd5ddef7189b9233e3c14079917"}, + {file = "simplejson-3.18.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3dbfaa79b1c0efdb768392a19110f1aff793f3e8d43f57e292f46734b8affb45"}, + {file = "simplejson-3.18.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7339bd6203351555c1e728acd601ba95ebce0f6041ebdb386e025f00af3f1769"}, + {file = "simplejson-3.18.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:544e5607142d66a469ecf78a3154ec0f915834dc3b8cfdb2677a78ca58319ad6"}, + {file = "simplejson-3.18.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:56d36f47bc7c7684504f0f18feb161a0b1162546b3622e45aa6155f8285180ac"}, + {file = "simplejson-3.18.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b482d1fdd8f860e743c7de8cd6dfe54fb9fe8cd6ccba29e2966912ac89e17b2f"}, + {file = "simplejson-3.18.4-cp37-cp37m-win32.whl", hash = "sha256:313dfd911723dc3022fed7050a7b315d5d0681cd56eee08e44e2cbd39fd9ad81"}, + {file = "simplejson-3.18.4-cp37-cp37m-win_amd64.whl", hash = "sha256:f5e0a03e533313eee9437ccc6c4eab47369f17bc919b57df4a20ccd8bc85d8fd"}, + {file = "simplejson-3.18.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5c4f59dd358c3a99efa46d62dc1583be3a1c37171f5240c4cbdc2d5838870902"}, + {file = "simplejson-3.18.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:041dd69026284d10f035cefb4a75026d2cfcef31f31e62585eeb2b7776e7e047"}, + {file = "simplejson-3.18.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:47509775a5c41ec2a6cd17c9c00fc14965cad8e6670059663872ba5e39332f57"}, + {file = "simplejson-3.18.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1b425a857ce52e651739314e4118fc68bd702ef983148b8fd5cb6f68bb6a020"}, + {file = "simplejson-3.18.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:deb71e6166e4f1264174d78b5b88abd52b14c6649e6eabaf9cf93cb1c7362850"}, + {file = "simplejson-3.18.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:827ddc3b3603f7d0421b054388da6face7871d800c4b3bbedeedc8778e4085ea"}, + {file = 
"simplejson-3.18.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc74a9ef4d61e18ee6f1886b6ef1fe285b1f432885288afacfb7402f7d469448"}, + {file = "simplejson-3.18.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:16fbebfc38ad4285c256d2430797fd669b0437d090e985c6d443521d4303b133"}, + {file = "simplejson-3.18.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e7d3f7cd57ce0c6a5bb8133f8ed5c3d1be0473a88b7d91a300626298f12d0999"}, + {file = "simplejson-3.18.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:b43d3c2e204d709af955bdb904ae127fe137363ace87fbf7dc8fe6017f7f8449"}, + {file = "simplejson-3.18.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ab5941e1fd509fc151258477ef4b663fe14c94f8faf3581827bf4b02080fd4ba"}, + {file = "simplejson-3.18.4-cp38-cp38-win32.whl", hash = "sha256:a1163bfe5d043c20adeb5c4c8e89dd1dd39b375c8ca6f1c1e35ec537ad7a12e7"}, + {file = "simplejson-3.18.4-cp38-cp38-win_amd64.whl", hash = "sha256:8ccc982197982cdda19e3e5ba4ef7f6ad6bed3eb39bb423bfbf7fa2cd29488ab"}, + {file = "simplejson-3.18.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:01f426ee9e3a2d205aa4c22c3da996b51f2de75c4199ef703258a28b304dea8c"}, + {file = "simplejson-3.18.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:46b8cc86204b51eddcf157cbaf3c44a20f24393030442af0909eeb961186cb67"}, + {file = "simplejson-3.18.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:65de5876e34780b43f92d9d2539de16ecc56d16f56e56e59b34adfa1cebe064f"}, + {file = "simplejson-3.18.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa6fe8fa94a831886ee164ac03514f361e1387a62a1b9da32fde5c0c1f27fa8d"}, + {file = "simplejson-3.18.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a50a9da1cf93e35f26c4ddee162abf3184a340339ec2d4001c34607b87e71b4"}, + {file = "simplejson-3.18.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a2285609b4edbf9957440642493788ebef6583042b3fb96217c2e71f29bc6d80"}, + {file = "simplejson-3.18.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b217201efc007166e24e9a282007cc208a2d059350a7c5bd0b0303460ad3019"}, + {file = "simplejson-3.18.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0cc9a47bf8cde85c99db5f4a919bb756e62427ade0f2e875a6ec89ae8492d486"}, + {file = "simplejson-3.18.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e042ae053e05fe193514d51d6b0f0243729961901e9a75f8b596bfaf69522c52"}, + {file = "simplejson-3.18.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:d0d3b9f7cee233368d92c89746dde74313abafaa3ec1f0c06a3f4f164dc27bcc"}, + {file = "simplejson-3.18.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1844d7782652f859d9648531778582d4842d80cfff8d334eb23bb8da0d22a1b0"}, + {file = "simplejson-3.18.4-cp39-cp39-win32.whl", hash = "sha256:2a6e5c0e0817fb20dbb880c83caebbd4ef39f1901f6f8e53b73a3c74de4e5172"}, + {file = "simplejson-3.18.4-cp39-cp39-win_amd64.whl", hash = "sha256:34d95ad8e27754f0d91917600d6ea273e05c82a71021f168c45be48637d9502f"}, + {file = "simplejson-3.18.4-py3-none-any.whl", hash = "sha256:03de1ec4ad734f28ca49b0a758b997d752be0d089ed30360157c4e8811999c8f"}, + {file = "simplejson-3.18.4.tar.gz", hash = "sha256:6197cfebe659ac802a686b5408494115a7062b45cdf37679c4d6a9d4f39649b7"}, ] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, @@ -3480,16 +3538,16 @@ 
snowballstemmer = [ {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, ] soupsieve = [ - {file = "soupsieve-2.3.2.post1-py3-none-any.whl", hash = "sha256:3b2503d3c7084a42b1ebd08116e5f81aadfaea95863628c80a3b774a11b7c759"}, - {file = "soupsieve-2.3.2.post1.tar.gz", hash = "sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d"}, + {file = "soupsieve-2.4-py3-none-any.whl", hash = "sha256:49e5368c2cda80ee7e84da9dbe3e110b70a4575f196efb74e51b94549d921955"}, + {file = "soupsieve-2.4.tar.gz", hash = "sha256:e28dba9ca6c7c00173e34e4ba57448f0688bb681b7c5e8bf4971daafc093d69a"}, ] -Sphinx = [ +sphinx = [ {file = "Sphinx-5.3.0.tar.gz", hash = "sha256:51026de0a9ff9fc13c05d74913ad66047e104f56a129ff73e174eb5c3ee794b5"}, {file = "sphinx-5.3.0-py3-none-any.whl", hash = "sha256:060ca5c9f7ba57a08a1219e547b269fadf125ae25b06b9fa7f66768efb652d6d"}, ] sphinx-autoapi = [ - {file = "sphinx-autoapi-2.0.0.tar.gz", hash = "sha256:97dcf1b5b54cd0d8efef867594e4a4f3e2d3a2c0ec1e5a891e0a61bc77046006"}, - {file = "sphinx_autoapi-2.0.0-py2.py3-none-any.whl", hash = "sha256:dab2753a38cad907bf4e61473c0da365a26bfbe69fbf5aa6e4f7d48e1cf8a148"}, + {file = "sphinx-autoapi-2.1.0.tar.gz", hash = "sha256:5b5c58064214d5a846c9c81d23f00990a64654b9bca10213231db54a241bc50f"}, + {file = "sphinx_autoapi-2.1.0-py2.py3-none-any.whl", hash = "sha256:b25c7b2cda379447b8c36b6a0e3bdf76e02fd64f7ca99d41c6cbdf130a01768f"}, ] sphinx-autobuild = [ {file = "sphinx-autobuild-2021.3.14.tar.gz", hash = "sha256:de1ca3b66e271d2b5b5140c35034c89e47f263f2cd5db302c9217065f7443f05"}, @@ -3500,20 +3558,20 @@ sphinx-basic-ng = [ {file = "sphinx_basic_ng-1.0.0b1.tar.gz", hash = "sha256:89374bd3ccd9452a301786781e28c8718e99960f2d4f411845ea75fc7bb5a9b0"}, ] sphinx-click = [ - {file = "sphinx-click-4.3.0.tar.gz", hash = "sha256:bd4db5d3c1bec345f07af07b8e28a76cfc5006d997984e38ae246bbf8b9a3b38"}, - {file = "sphinx_click-4.3.0-py3-none-any.whl", hash = "sha256:23e85a3cb0b728a421ea773699f6acadefae171d1a764a51dd8ec5981503ccbe"}, + {file = "sphinx-click-4.4.0.tar.gz", hash = "sha256:cc67692bd28f482c7f01531c61b64e9d2f069bfcf3d24cbbb51d4a84a749fa48"}, + {file = "sphinx_click-4.4.0-py3-none-any.whl", hash = "sha256:2821c10a68fc9ee6ce7c92fad26540d8d8c8f45e6d7258f0e4fb7529ae8fab49"}, ] sphinxcontrib-applehelp = [ - {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, - {file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"}, + {file = "sphinxcontrib-applehelp-1.0.4.tar.gz", hash = "sha256:828f867945bbe39817c210a1abfd1bc4895c8b73fcaade56d45357a348a07d7e"}, + {file = "sphinxcontrib_applehelp-1.0.4-py3-none-any.whl", hash = "sha256:29d341f67fb0f6f586b23ad80e072c8e6ad0b48417db2bde114a4c9746feb228"}, ] sphinxcontrib-devhelp = [ {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, ] sphinxcontrib-htmlhelp = [ - {file = "sphinxcontrib-htmlhelp-2.0.0.tar.gz", hash = "sha256:f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2"}, - {file = "sphinxcontrib_htmlhelp-2.0.0-py2.py3-none-any.whl", hash = "sha256:d412243dfb797ae3ec2b59eca0e52dac12e75a241bf0e4eb861e450d06c6ed07"}, + {file = 
"sphinxcontrib-htmlhelp-2.0.1.tar.gz", hash = "sha256:0cbdd302815330058422b98a113195c9249825d681e18f11e8b1f78a2f11efff"}, + {file = "sphinxcontrib_htmlhelp-2.0.1-py3-none-any.whl", hash = "sha256:c38cb46dccf316c79de6e5515e1770414b797162b23cd3d06e67020e1d2a6903"}, ] sphinxcontrib-jsmath = [ {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, @@ -3528,61 +3586,60 @@ sphinxcontrib-serializinghtml = [ {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, ] SpiffWorkflow = [] -SQLAlchemy = [ - {file = "SQLAlchemy-1.4.42-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:28e881266a172a4d3c5929182fde6bb6fba22ac93f137d5380cc78a11a9dd124"}, - {file = "SQLAlchemy-1.4.42-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:ca9389a00f639383c93ed00333ed763812f80b5ae9e772ea32f627043f8c9c88"}, - {file = "SQLAlchemy-1.4.42-cp27-cp27m-win32.whl", hash = "sha256:1d0c23ecf7b3bc81e29459c34a3f4c68ca538de01254e24718a7926810dc39a6"}, - {file = "SQLAlchemy-1.4.42-cp27-cp27m-win_amd64.whl", hash = "sha256:6c9d004eb78c71dd4d3ce625b80c96a827d2e67af9c0d32b1c1e75992a7916cc"}, - {file = "SQLAlchemy-1.4.42-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9e3a65ce9ed250b2f096f7b559fe3ee92e6605fab3099b661f0397a9ac7c8d95"}, - {file = "SQLAlchemy-1.4.42-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:2e56dfed0cc3e57b2f5c35719d64f4682ef26836b81067ee6cfad062290fd9e2"}, - {file = "SQLAlchemy-1.4.42-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b42c59ffd2d625b28cdb2ae4cde8488543d428cba17ff672a543062f7caee525"}, - {file = "SQLAlchemy-1.4.42-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:22459fc1718785d8a86171bbe7f01b5c9d7297301ac150f508d06e62a2b4e8d2"}, - {file = "SQLAlchemy-1.4.42-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df76e9c60879fdc785a34a82bf1e8691716ffac32e7790d31a98d7dec6e81545"}, - {file = "SQLAlchemy-1.4.42-cp310-cp310-win32.whl", hash = "sha256:e7e740453f0149437c101ea4fdc7eea2689938c5760d7dcc436c863a12f1f565"}, - {file = "SQLAlchemy-1.4.42-cp310-cp310-win_amd64.whl", hash = "sha256:effc89e606165ca55f04f3f24b86d3e1c605e534bf1a96e4e077ce1b027d0b71"}, - {file = "SQLAlchemy-1.4.42-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:97ff50cd85bb907c2a14afb50157d0d5486a4b4639976b4a3346f34b6d1b5272"}, - {file = "SQLAlchemy-1.4.42-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e12c6949bae10f1012ab5c0ea52ab8db99adcb8c7b717938252137cdf694c775"}, - {file = "SQLAlchemy-1.4.42-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11b2ec26c5d2eefbc3e6dca4ec3d3d95028be62320b96d687b6e740424f83b7d"}, - {file = "SQLAlchemy-1.4.42-cp311-cp311-win32.whl", hash = "sha256:6045b3089195bc008aee5c273ec3ba9a93f6a55bc1b288841bd4cfac729b6516"}, - {file = "SQLAlchemy-1.4.42-cp311-cp311-win_amd64.whl", hash = "sha256:0501f74dd2745ec38f44c3a3900fb38b9db1ce21586b691482a19134062bf049"}, - {file = "SQLAlchemy-1.4.42-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:6e39e97102f8e26c6c8550cb368c724028c575ec8bc71afbbf8faaffe2b2092a"}, - {file = "SQLAlchemy-1.4.42-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:15d878929c30e41fb3d757a5853b680a561974a0168cd33a750be4ab93181628"}, - {file = "SQLAlchemy-1.4.42-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fa5b7eb2051e857bf83bade0641628efe5a88de189390725d3e6033a1fff4257"}, - {file = "SQLAlchemy-1.4.42-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e1c5f8182b4f89628d782a183d44db51b5af84abd6ce17ebb9804355c88a7b5"}, - {file = "SQLAlchemy-1.4.42-cp36-cp36m-win32.whl", hash = "sha256:a7dd5b7b34a8ba8d181402d824b87c5cee8963cb2e23aa03dbfe8b1f1e417cde"}, - {file = "SQLAlchemy-1.4.42-cp36-cp36m-win_amd64.whl", hash = "sha256:5ede1495174e69e273fad68ad45b6d25c135c1ce67723e40f6cf536cb515e20b"}, - {file = "SQLAlchemy-1.4.42-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:9256563506e040daddccaa948d055e006e971771768df3bb01feeb4386c242b0"}, - {file = "SQLAlchemy-1.4.42-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4948b6c5f4e56693bbeff52f574279e4ff972ea3353f45967a14c30fb7ae2beb"}, - {file = "SQLAlchemy-1.4.42-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1811a0b19a08af7750c0b69e38dec3d46e47c4ec1d74b6184d69f12e1c99a5e0"}, - {file = "SQLAlchemy-1.4.42-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b01d9cd2f9096f688c71a3d0f33f3cd0af8549014e66a7a7dee6fc214a7277d"}, - {file = "SQLAlchemy-1.4.42-cp37-cp37m-win32.whl", hash = "sha256:bd448b262544b47a2766c34c0364de830f7fb0772d9959c1c42ad61d91ab6565"}, - {file = "SQLAlchemy-1.4.42-cp37-cp37m-win_amd64.whl", hash = "sha256:04f2598c70ea4a29b12d429a80fad3a5202d56dce19dd4916cc46a965a5ca2e9"}, - {file = "SQLAlchemy-1.4.42-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:3ab7c158f98de6cb4f1faab2d12973b330c2878d0c6b689a8ca424c02d66e1b3"}, - {file = "SQLAlchemy-1.4.42-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ee377eb5c878f7cefd633ab23c09e99d97c449dd999df639600f49b74725b80"}, - {file = "SQLAlchemy-1.4.42-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:934472bb7d8666727746a75670a1f8d91a9cae8c464bba79da30a0f6faccd9e1"}, - {file = "SQLAlchemy-1.4.42-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb94a3d1ba77ff2ef11912192c066f01e68416f554c194d769391638c8ad09a"}, - {file = "SQLAlchemy-1.4.42-cp38-cp38-win32.whl", hash = "sha256:f0f574465b78f29f533976c06b913e54ab4980b9931b69aa9d306afff13a9471"}, - {file = "SQLAlchemy-1.4.42-cp38-cp38-win_amd64.whl", hash = "sha256:a85723c00a636eed863adb11f1e8aaa36ad1c10089537823b4540948a8429798"}, - {file = "SQLAlchemy-1.4.42-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:5ce6929417d5dce5ad1d3f147db81735a4a0573b8fb36e3f95500a06eaddd93e"}, - {file = "SQLAlchemy-1.4.42-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723e3b9374c1ce1b53564c863d1a6b2f1dc4e97b1c178d9b643b191d8b1be738"}, - {file = "SQLAlchemy-1.4.42-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:876eb185911c8b95342b50a8c4435e1c625944b698a5b4a978ad2ffe74502908"}, - {file = "SQLAlchemy-1.4.42-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fd49af453e590884d9cdad3586415922a8e9bb669d874ee1dc55d2bc425aacd"}, - {file = 
"SQLAlchemy-1.4.42-cp39-cp39-win32.whl", hash = "sha256:e4ef8cb3c5b326f839bfeb6af5f406ba02ad69a78c7aac0fbeeba994ad9bb48a"}, - {file = "SQLAlchemy-1.4.42-cp39-cp39-win_amd64.whl", hash = "sha256:5f966b64c852592469a7eb759615bbd351571340b8b344f1d3fa2478b5a4c934"}, - {file = "SQLAlchemy-1.4.42.tar.gz", hash = "sha256:177e41914c476ed1e1b77fd05966ea88c094053e17a85303c4ce007f88eff363"}, +sqlalchemy = [ + {file = "SQLAlchemy-2.0.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7917632606fc5d4be661dcde45cc415df835e594e2c50cc999a44f24b6bf6d92"}, + {file = "SQLAlchemy-2.0.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:32f508fef9c5a7d19411d94ef64cf5405e42c4689e51ddbb81ac9a7be045cce8"}, + {file = "SQLAlchemy-2.0.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0995b92612979d208189245bf87349ad9243b97b49652347a28ddee0803225a"}, + {file = "SQLAlchemy-2.0.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cebd161f964af58290596523c65e41a5a161a99f7212b1ae675e288a4b5e0a7c"}, + {file = "SQLAlchemy-2.0.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c38641f5c3714505d65dbbd8fb1350408b9ad8461769ec8e440e1177f9c92d1d"}, + {file = "SQLAlchemy-2.0.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:921485d1f69ed016e1f756de67d02ad4f143eb6b92b9776bfff78786d8978ab5"}, + {file = "SQLAlchemy-2.0.7-cp310-cp310-win32.whl", hash = "sha256:a65a8fd09bdffd63fa23b39cd902e6a4ca23d86ecfe129513e43767a1f3e91fb"}, + {file = "SQLAlchemy-2.0.7-cp310-cp310-win_amd64.whl", hash = "sha256:d2e7411d5ea164c6f4d003f5d4f5e72e202956aaa7496b95bb4a4c39669e001c"}, + {file = "SQLAlchemy-2.0.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:432cfd77642771ee7ea0dd0f3fb664f18506a3625eab6e6d5d1d771569171270"}, + {file = "SQLAlchemy-2.0.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce076e25f1170000b4ecdc57a1ff8a70dbe4a5648ec3da0563ef3064e8db4f15"}, + {file = "SQLAlchemy-2.0.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14854bdb2a35af536d14f77dfa8dbc20e1bb1972996d64c4147e0d3165c9aaf5"}, + {file = "SQLAlchemy-2.0.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9020125e3be677c64d4dda7048e247343f1663089cf268a4cc98c957adb7dbe0"}, + {file = "SQLAlchemy-2.0.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fb649c5473f79c9a7b6133f53a31f4d87de14755c79224007eb7ec76e628551e"}, + {file = "SQLAlchemy-2.0.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33f73cc45ffa050f5c3b60ff4490e0ae9e02701461c1600d5ede1b008076b1b9"}, + {file = "SQLAlchemy-2.0.7-cp311-cp311-win32.whl", hash = "sha256:0789e199fbce8cb1775337afc631ed12bcc5463dd77d7a06b8dafd758cde51f8"}, + {file = "SQLAlchemy-2.0.7-cp311-cp311-win_amd64.whl", hash = "sha256:013f4f330001e84a2b0ef1f2c9bd73169c79d582e54e1a144be1be1dbc911711"}, + {file = "SQLAlchemy-2.0.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4339110be209fea37a2bb4f35f1127c7562a0393e9e6df5d9a65cc4f5c167cb6"}, + {file = "SQLAlchemy-2.0.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7e61e2e4dfe175dc3510889e44eda1c32f55870d6950ef40519640cb266704d"}, + {file = "SQLAlchemy-2.0.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d44ff7573016fc26311b5a5c54d5656fb9e0c39e138bc8b81cb7c8667485203"}, + {file = "SQLAlchemy-2.0.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:57b80e877eb6ec63295835f8a3b86ca3a44829f80c4748e1b019e03adea550fc"}, + {file = "SQLAlchemy-2.0.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:e90f0be674e0845c5c1ccfa5e31c9ee28fd406546a61afc734355cc7ea1f8f8b"}, + {file = "SQLAlchemy-2.0.7-cp37-cp37m-win32.whl", hash = "sha256:e735a635126b2338dfd3a0863b675437cb53d85885a7602b8cffb24345df33ed"}, + {file = "SQLAlchemy-2.0.7-cp37-cp37m-win_amd64.whl", hash = "sha256:ea1c63e61b5c13161c8468305f0a5837c80aae2070e33654c68dd12572b638eb"}, + {file = "SQLAlchemy-2.0.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cc337b96ec59ef29907eeadc2ac11188739281568f14c719e61550ca6d201a41"}, + {file = "SQLAlchemy-2.0.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0eac488be90dd3f7a655d2e34fa59e1305fccabc4abfbd002e3a72ae10bd2f89"}, + {file = "SQLAlchemy-2.0.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8ab8f90f4a13c979e6c41c9f011b655c1b9ae2df6cffa8fa2c7c4d740f3512e"}, + {file = "SQLAlchemy-2.0.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc370d53fee7408330099c4bcc2573a107757b203bc61f114467dfe586a0c7bd"}, + {file = "SQLAlchemy-2.0.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:494db0026918e3f707466a1200a5dedbf254a4bce01a3115fd95f04ba8258f09"}, + {file = "SQLAlchemy-2.0.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:486015a58c9a67f65a15b4f19468b35b97cee074ae55386a9c240f1da308fbfe"}, + {file = "SQLAlchemy-2.0.7-cp38-cp38-win32.whl", hash = "sha256:5f7c40ec2e3b31293184020daba95850832bea523a08496ac89b27a5276ec804"}, + {file = "SQLAlchemy-2.0.7-cp38-cp38-win_amd64.whl", hash = "sha256:3da3dff8d9833a7d7f66a3c45a79a3955f775c79f47bb7eea266d0b4c267b17a"}, + {file = "SQLAlchemy-2.0.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:774965c41b71c8ebe3c5728bf5b9a948231fc3a0422d9fdace0686f5bb689ad6"}, + {file = "SQLAlchemy-2.0.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:94556a2a7fc3de094ea056b62845e2e6e271e26d1e1b2540a1cd2d2506257a10"}, + {file = "SQLAlchemy-2.0.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f15c54713a8dd57a01c974c9f96476688f6f6374d348819ed7e459535844b614"}, + {file = "SQLAlchemy-2.0.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea9461f6955f3cf9eff6eeec271686caed7792c76f5b966886a36a42ea46e6b2"}, + {file = "SQLAlchemy-2.0.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:18795e87601b4244fd08b542cd6bff9ef674b17bcd34e4a3c9935398e2cc762c"}, + {file = "SQLAlchemy-2.0.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0b698440c477c00bdedff87348b19a79630a235864a8f4378098d61079c16ce9"}, + {file = "SQLAlchemy-2.0.7-cp39-cp39-win32.whl", hash = "sha256:38e26cf6b9b4c6c37846f7e31b42e4d664b35f055691265f07e06aeb6167c494"}, + {file = "SQLAlchemy-2.0.7-cp39-cp39-win_amd64.whl", hash = "sha256:a6f7d1debb233f1567d700ebcdde0781a0b63db0ef266246dfbf75ae41bfdf85"}, + {file = "SQLAlchemy-2.0.7-py3-none-any.whl", hash = "sha256:fc67667c8e8c04e5c3250ab2cd51df40bc7c28c7c253d0475b377eff86fe4bb0"}, + {file = "SQLAlchemy-2.0.7.tar.gz", hash = "sha256:a4c1e1582492c66dfacc9eab52738f3e64d9a2a380e412668f75aa06e540f649"}, ] -sqlalchemy-stubs = [] stevedore = [ - {file = "stevedore-4.0.1-py3-none-any.whl", hash = "sha256:01645addb67beff04c7cfcbb0a6af8327d2efc3380b0f034aa316d4576c4d470"}, - {file = "stevedore-4.0.1.tar.gz", hash = "sha256:9a23111a6e612270c591fd31ff3321c6b5f3d5f3dabb1427317a5ab608fc261a"}, + {file = "stevedore-5.0.0-py3-none-any.whl", hash = "sha256:bd5a71ff5e5e5f5ea983880e4a1dd1bb47f8feebbb3d95b592398e2f02194771"}, + {file = "stevedore-5.0.0.tar.gz", hash = "sha256:2c428d2338976279e8eb2196f7a94910960d9f7ba2f41f3988511e95ca447021"}, ] swagger-ui-bundle = 
[ {file = "swagger_ui_bundle-0.0.9-py3-none-any.whl", hash = "sha256:cea116ed81147c345001027325c1ddc9ca78c1ee7319935c3c75d3669279d575"}, {file = "swagger_ui_bundle-0.0.9.tar.gz", hash = "sha256:b462aa1460261796ab78fd4663961a7f6f347ce01760f1303bbbdf630f11f516"}, ] tokenize-rt = [ - {file = "tokenize_rt-4.2.1-py2.py3-none-any.whl", hash = "sha256:08a27fa032a81cf45e8858d0ac706004fcd523e8463415ddf1442be38e204ea8"}, - {file = "tokenize_rt-4.2.1.tar.gz", hash = "sha256:0d4f69026fed520f8a1e0103aa36c406ef4661417f20ca643f913e33531b3b94"}, + {file = "tokenize_rt-5.0.0-py2.py3-none-any.whl", hash = "sha256:c67772c662c6b3dc65edf66808577968fb10badfc2042e3027196bed4daf9e5a"}, + {file = "tokenize_rt-5.0.0.tar.gz", hash = "sha256:3160bc0c3e8491312d0485171dea861fc160a240f5f5766b72a1165408d10740"}, ] toml = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, @@ -3593,8 +3650,8 @@ tomli = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] tomlkit = [ - {file = "tomlkit-0.11.6-py3-none-any.whl", hash = "sha256:07de26b0d8cfc18f871aec595fda24d95b08fef89d147caa861939f37230bf4b"}, - {file = "tomlkit-0.11.6.tar.gz", hash = "sha256:71b952e5721688937fb02cf9d354dbcf0785066149d2855e44531ebdd2b65d73"}, + {file = "tomlkit-0.11.7-py3-none-any.whl", hash = "sha256:5325463a7da2ef0c6bbfefb62a3dc883aebe679984709aee32a317907d0a8d3c"}, + {file = "tomlkit-0.11.7.tar.gz", hash = "sha256:f392ef70ad87a672f02519f99967d28a4d3047133e2d1df936511465fbb3791d"}, ] tornado = [ {file = "tornado-6.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:20f638fd8cc85f3cbae3c732326e96addff0a15e22d80f049e00121651e82e72"}, @@ -3618,152 +3675,163 @@ types-click = [ {file = "types_click-7.1.8-py3-none-any.whl", hash = "sha256:8cb030a669e2e927461be9827375f83c16b8178c365852c060a34e24871e7e81"}, ] types-dateparser = [ - {file = "types-dateparser-1.1.4.1.tar.gz", hash = "sha256:0f76578bbae15c8b8701b5efd94db98a97ce0a27aedfe6f14a531170de6db97d"}, - {file = "types_dateparser-1.1.4.1-py3-none-any.whl", hash = "sha256:dd7b2343bb06225c0e358533609b66a8edfb95e5426d8f658664e7d0f27dea68"}, + {file = "types-dateparser-1.1.4.9.tar.gz", hash = "sha256:506668f024c2136a44e9046ee18dd4279a55df1be5dc55e5c29ab07643a2e18a"}, + {file = "types_dateparser-1.1.4.9-py3-none-any.whl", hash = "sha256:6539e49032151a8445092109f93e61f51b2082a9f295691df13e073c6abf9137"}, ] -types-Flask = [ +types-flask = [ {file = "types-Flask-1.1.6.tar.gz", hash = "sha256:aac777b3abfff9436e6b01f6d08171cf23ea6e5be71cbf773aaabb1c5763e9cf"}, {file = "types_Flask-1.1.6-py3-none-any.whl", hash = "sha256:6ab8a9a5e258b76539d652f6341408867298550b19b81f0e41e916825fc39087"}, ] -types-Jinja2 = [ +types-jinja2 = [ {file = "types-Jinja2-2.11.9.tar.gz", hash = "sha256:dbdc74a40aba7aed520b7e4d89e8f0fe4286518494208b35123bcf084d4b8c81"}, {file = "types_Jinja2-2.11.9-py3-none-any.whl", hash = "sha256:60a1e21e8296979db32f9374d8a239af4cb541ff66447bb915d8ad398f9c63b2"}, ] -types-MarkupSafe = [ +types-markupsafe = [ {file = "types-MarkupSafe-1.1.10.tar.gz", hash = "sha256:85b3a872683d02aea3a5ac2a8ef590193c344092032f58457287fbf8e06711b1"}, {file = "types_MarkupSafe-1.1.10-py3-none-any.whl", hash = "sha256:ca2bee0f4faafc45250602567ef38d533e877d2ddca13003b319c551ff5b3cc5"}, ] types-pytz = [ - {file = "types-pytz-2022.5.0.0.tar.gz", hash = "sha256:0c163b15d3e598e6cc7074a99ca9ec72b25dc1b446acc133b827667af0b7b09a"}, - {file = "types_pytz-2022.5.0.0-py3-none-any.whl", hash = 
"sha256:a8e1fe6a1b270fbfaf2553b20ad0f1316707cc320e596da903bb17d7373fed2d"}, + {file = "types-pytz-2022.7.1.2.tar.gz", hash = "sha256:487d3e8e9f4071eec8081746d53fa982bbc05812e719dcbf2ebf3d55a1a4cd28"}, + {file = "types_pytz-2022.7.1.2-py3-none-any.whl", hash = "sha256:40ca448a928d566f7d44ddfde0066e384f7ffbd4da2778e42a4570eaca572446"}, ] -types-PyYAML = [ - {file = "types-PyYAML-6.0.12.tar.gz", hash = "sha256:f6f350418125872f3f0409d96a62a5a5ceb45231af5cc07ee0034ec48a3c82fa"}, - {file = "types_PyYAML-6.0.12-py3-none-any.whl", hash = "sha256:29228db9f82df4f1b7febee06bbfb601677882e98a3da98132e31c6874163e15"}, +types-pyyaml = [ + {file = "types-PyYAML-6.0.12.9.tar.gz", hash = "sha256:c51b1bd6d99ddf0aa2884a7a328810ebf70a4262c292195d3f4f9a0005f9eeb6"}, + {file = "types_PyYAML-6.0.12.9-py3-none-any.whl", hash = "sha256:5aed5aa66bd2d2e158f75dda22b059570ede988559f030cf294871d3b647e3e8"}, ] types-requests = [ - {file = "types-requests-2.28.11.2.tar.gz", hash = "sha256:fdcd7bd148139fb8eef72cf4a41ac7273872cad9e6ada14b11ff5dfdeee60ed3"}, - {file = "types_requests-2.28.11.2-py3-none-any.whl", hash = "sha256:14941f8023a80b16441b3b46caffcbfce5265fd14555844d6029697824b5a2ef"}, + {file = "types-requests-2.28.11.17.tar.gz", hash = "sha256:0d580652ce903f643f8c3b494dd01d29367ea57cea0c7ad7f65cf3169092edb0"}, + {file = "types_requests-2.28.11.17-py3-none-any.whl", hash = "sha256:cc1aba862575019306b2ed134eb1ea994cab1c887a22e18d3383e6dd42e9789b"}, ] types-urllib3 = [ - {file = "types-urllib3-1.26.25.1.tar.gz", hash = "sha256:a948584944b2412c9a74b9cf64f6c48caf8652cb88b38361316f6d15d8a184cd"}, - {file = "types_urllib3-1.26.25.1-py3-none-any.whl", hash = "sha256:f6422596cc9ee5fdf68f9d547f541096a20c2dcfd587e37c804c9ea720bf5cb2"}, + {file = "types-urllib3-1.26.25.10.tar.gz", hash = "sha256:c44881cde9fc8256d05ad6b21f50c4681eb20092552351570ab0a8a0653286d6"}, + {file = "types_urllib3-1.26.25.10-py3-none-any.whl", hash = "sha256:12c744609d588340a07e45d333bf870069fc8793bcf96bae7a96d4712a42591d"}, ] -types-Werkzeug = [ +types-werkzeug = [ {file = "types-Werkzeug-1.0.9.tar.gz", hash = "sha256:5cc269604c400133d452a40cee6397655f878fc460e03fde291b9e3a5eaa518c"}, {file = "types_Werkzeug-1.0.9-py3-none-any.whl", hash = "sha256:194bd5715a13c598f05c63e8a739328657590943bce941e8a3619a6b5d4a54ec"}, ] typing-extensions = [ - {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"}, - {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"}, + {file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"}, + {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"}, ] tzdata = [ - {file = "tzdata-2022.5-py2.py3-none-any.whl", hash = "sha256:323161b22b7802fdc78f20ca5f6073639c64f1a7227c40cd3e19fd1d0ce6650a"}, - {file = "tzdata-2022.5.tar.gz", hash = "sha256:e15b2b3005e2546108af42a0eb4ccab4d9e225e2dfbf4f77aad50c70a4b1f3ab"}, + {file = "tzdata-2023.2-py2.py3-none-any.whl", hash = "sha256:905ae9e6744dd9ef5ce94d2aaa2dd00282fee38b670b2133407f23c388f110a1"}, + {file = "tzdata-2023.2.tar.gz", hash = "sha256:c3b51b235b07f9f1889089c2264bcbeaaba260a63f89bea09e350ea4205eb95f"}, ] tzlocal = [ - {file = "tzlocal-4.2-py3-none-any.whl", hash = "sha256:89885494684c929d9191c57aa27502afc87a579be5cdd3225c77c463ea043745"}, - {file = "tzlocal-4.2.tar.gz", hash = 
"sha256:ee5842fa3a795f023514ac2d801c4a81d1743bbe642e3940143326b3a00addd7"}, + {file = "tzlocal-4.3-py3-none-any.whl", hash = "sha256:b44c4388f3d34f25862cfbb387578a4d70fec417649da694a132f628a23367e2"}, + {file = "tzlocal-4.3.tar.gz", hash = "sha256:3f21d09e1b2aa9f2dacca12da240ca37de3ba5237a93addfd6d593afe9073355"}, ] -Unidecode = [ +unidecode = [ {file = "Unidecode-1.3.6-py3-none-any.whl", hash = "sha256:547d7c479e4f377b430dd91ac1275d593308dce0fc464fb2ab7d41f82ec653be"}, {file = "Unidecode-1.3.6.tar.gz", hash = "sha256:fed09cf0be8cf415b391642c2a5addfc72194407caee4f98719e40ec2a72b830"}, ] urllib3 = [ - {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"}, - {file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"}, + {file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"}, + {file = "urllib3-1.26.15.tar.gz", hash = "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"}, ] vine = [ {file = "vine-5.0.0-py2.py3-none-any.whl", hash = "sha256:4c9dceab6f76ed92105027c49c823800dd33cacce13bdedc5b914e3514b7fb30"}, {file = "vine-5.0.0.tar.gz", hash = "sha256:7d3b1624a953da82ef63462013bbd271d3eb75751489f9807598e8f340bd637e"}, ] virtualenv = [ - {file = "virtualenv-20.16.5-py3-none-any.whl", hash = "sha256:d07dfc5df5e4e0dbc92862350ad87a36ed505b978f6c39609dc489eadd5b0d27"}, - {file = "virtualenv-20.16.5.tar.gz", hash = "sha256:227ea1b9994fdc5ea31977ba3383ef296d7472ea85be9d6732e42a91c04e80da"}, + {file = "virtualenv-20.21.0-py3-none-any.whl", hash = "sha256:31712f8f2a17bd06234fa97fdf19609e789dd4e3e4bf108c3da71d710651adbc"}, + {file = "virtualenv-20.21.0.tar.gz", hash = "sha256:f50e3e60f990a0757c9b68333c9fdaa72d7188caa417f96af9e52407831a3b68"}, ] wcwidth = [ - {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, - {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, + {file = "wcwidth-0.2.6-py2.py3-none-any.whl", hash = "sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e"}, + {file = "wcwidth-0.2.6.tar.gz", hash = "sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0"}, ] werkzeug = [ {file = "Werkzeug-2.2.3-py3-none-any.whl", hash = "sha256:56433961bc1f12533306c624f3be5e744389ac61d722175d543e1751285da612"}, {file = "Werkzeug-2.2.3.tar.gz", hash = "sha256:2e1ccc9417d4da358b9de6f174e3ac094391ea1d4fbef2d667865d819dfd0afe"}, ] wrapt = [ - {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1"}, - {file = 
"wrapt-1.14.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1"}, - {file = "wrapt-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320"}, - {file = "wrapt-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2"}, - {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4"}, - {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069"}, - {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c"}, - {file = "wrapt-1.14.1-cp310-cp310-win32.whl", hash = "sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8"}, - {file = "wrapt-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d"}, - {file = "wrapt-1.14.1-cp35-cp35m-win32.whl", hash = "sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7"}, - {file = "wrapt-1.14.1-cp35-cp35m-win_amd64.whl", hash = "sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00"}, - {file = "wrapt-1.14.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff"}, - {file = 
"wrapt-1.14.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d"}, - {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1"}, - {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569"}, - {file = "wrapt-1.14.1-cp36-cp36m-win32.whl", hash = "sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed"}, - {file = "wrapt-1.14.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471"}, - {file = "wrapt-1.14.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248"}, - {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68"}, - {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d"}, - {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a"}, - {file = "wrapt-1.14.1-cp37-cp37m-win32.whl", hash = "sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853"}, - {file = "wrapt-1.14.1-cp37-cp37m-win_amd64.whl", hash = "sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c"}, - {file = "wrapt-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456"}, - {file = "wrapt-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57"}, - {file = "wrapt-1.14.1-cp38-cp38-win32.whl", hash = "sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5"}, - {file = 
"wrapt-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d"}, - {file = "wrapt-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383"}, - {file = "wrapt-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7"}, - {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86"}, - {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735"}, - {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe"}, - {file = "wrapt-1.14.1-cp39-cp39-win32.whl", hash = "sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5"}, - {file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"}, - {file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"}, + {file = "wrapt-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1"}, + {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29"}, + {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2"}, + {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46"}, + {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c"}, + {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09"}, + {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079"}, + {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e"}, + {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a"}, + {file = "wrapt-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923"}, + {file = "wrapt-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee"}, + {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727"}, + {file = 
"wrapt-1.15.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7"}, + {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0"}, + {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec"}, + {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90"}, + {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975"}, + {file = "wrapt-1.15.0-cp310-cp310-win32.whl", hash = "sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1"}, + {file = "wrapt-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e"}, + {file = "wrapt-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7"}, + {file = "wrapt-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72"}, + {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb"}, + {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e"}, + {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c"}, + {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3"}, + {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92"}, + {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98"}, + {file = "wrapt-1.15.0-cp311-cp311-win32.whl", hash = "sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416"}, + {file = "wrapt-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705"}, + {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29"}, + {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd"}, + {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb"}, + {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248"}, + {file = "wrapt-1.15.0-cp35-cp35m-win32.whl", hash = "sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559"}, + {file = "wrapt-1.15.0-cp35-cp35m-win_amd64.whl", hash = "sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639"}, + {file = 
"wrapt-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba"}, + {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752"}, + {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364"}, + {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475"}, + {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8"}, + {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418"}, + {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2"}, + {file = "wrapt-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1"}, + {file = "wrapt-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420"}, + {file = "wrapt-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317"}, + {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e"}, + {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e"}, + {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0"}, + {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019"}, + {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034"}, + {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653"}, + {file = "wrapt-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0"}, + {file = "wrapt-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e"}, + {file = "wrapt-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145"}, + {file = "wrapt-1.15.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f"}, + {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd"}, + {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b"}, + {file = 
"wrapt-1.15.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f"}, + {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6"}, + {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094"}, + {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7"}, + {file = "wrapt-1.15.0-cp38-cp38-win32.whl", hash = "sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b"}, + {file = "wrapt-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1"}, + {file = "wrapt-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86"}, + {file = "wrapt-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c"}, + {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d"}, + {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc"}, + {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29"}, + {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a"}, + {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8"}, + {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9"}, + {file = "wrapt-1.15.0-cp39-cp39-win32.whl", hash = "sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff"}, + {file = "wrapt-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6"}, + {file = "wrapt-1.15.0-py3-none-any.whl", hash = "sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640"}, + {file = "wrapt-1.15.0.tar.gz", hash = "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a"}, ] -WTForms = [ +wtforms = [ {file = "WTForms-3.0.1-py3-none-any.whl", hash = "sha256:837f2f0e0ca79481b92884962b914eba4e72b7a2daaf1f939c890ed0124b834b"}, {file = "WTForms-3.0.1.tar.gz", hash = "sha256:6b351bbb12dd58af57ffef05bc78425d08d1914e0fd68ee14143b7ade023c5bc"}, ] xdoctest = [ - {file = "xdoctest-1.1.0-py3-none-any.whl", hash = "sha256:da330c4dacee51f3c785820bc743188fb6f7c64c5fa1c54bff8836b3cf23d69b"}, - {file = "xdoctest-1.1.0.tar.gz", hash = "sha256:0fd4fad7932f0a2f082dfdfb857dd6ca41603757586c39b1e5b4d333fc389f8a"}, + {file = "xdoctest-1.1.1-py3-none-any.whl", hash = "sha256:d59d4ed91cb92e4430ef0ad1b134a2bef02adff7d2fb9c9f057547bee44081a2"}, + {file = "xdoctest-1.1.1.tar.gz", hash = "sha256:2eac8131bdcdf2781b4e5a62d6de87f044b730cc8db8af142a51bb29c245e779"}, ] zipp = [ - {file = "zipp-3.10.0-py3-none-any.whl", hash = 
"sha256:4fcb6f278987a6605757302a6e40e896257570d11c51628968ccb2a47e80c6c1"}, - {file = "zipp-3.10.0.tar.gz", hash = "sha256:7a7262fd930bd3e36c50b9a64897aec3fafff3dfdeec9623ae22b40e93f99bb8"}, + {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, + {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, ] diff --git a/spiffworkflow-backend/pyproject.toml b/spiffworkflow-backend/pyproject.toml index 46ebc435..4504b729 100644 --- a/spiffworkflow-backend/pyproject.toml +++ b/spiffworkflow-backend/pyproject.toml @@ -49,7 +49,6 @@ setuptools = "^65.5.1" connexion = {extras = [ "swagger-ui",], version = "^2"} lxml = "^4.9.1" marshmallow-enum = "^1.5.1" -marshmallow-sqlalchemy = "^0.28.0" PyJWT = "^2.6.0" gunicorn = "^20.1.0" APScheduler = "*" @@ -73,7 +72,7 @@ types-pytz = "^2022.1.1" # sqlalchemy-stubs = { git = "https://github.com/dropbox/sqlalchemy-stubs.git", rev = "master" } # sqlalchemy-stubs = {develop = true, path = "/Users/kevin/projects/github/sqlalchemy-stubs"} # for now use my fork -sqlalchemy-stubs = { git = "https://github.com/burnettk/sqlalchemy-stubs.git", rev = "scoped-session-delete" } +# sqlalchemy-stubs = { git = "https://github.com/burnettk/sqlalchemy-stubs.git", rev = "scoped-session-delete" } simplejson = "^3.17.6" pytz = "^2022.6" dateparser = "^1.1.2" @@ -83,6 +82,7 @@ pylint = "^2.15.10" flask-simple-crypt = "^0.3.3" cryptography = "^39.0.2" safety = "^2.3.5" +sqlalchemy = "^2.0.7" [tool.poetry.dev-dependencies] diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py b/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py index 68f16ddf..57061497 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py @@ -44,8 +44,9 @@ class MyJSONEncoder(DefaultJSONProvider): return obj.serialized elif isinstance(obj, sqlalchemy.engine.row.Row): # type: ignore return_dict = {} - for row_key in obj.keys(): - row_value = obj[row_key] + row_mapping = obj._mapping + for row_key in row_mapping.keys(): + row_value = row_mapping[row_key] if hasattr(row_value, "serialized"): return_dict.update(row_value.serialized) elif hasattr(row_value, "__dict__"): diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py index 3fb8b439..a67b7d5a 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py @@ -53,6 +53,7 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel): """ProcessInstanceModel.""" __tablename__ = "process_instance" + __allow_unmapped__ = True id: int = db.Column(db.Integer, primary_key=True) process_model_identifier: str = db.Column(db.String(255), nullable=False, index=True) process_model_display_name: str = db.Column(db.String(255), nullable=False, index=True) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_report.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_report.py index ade1f60d..ad2041cb 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_report.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_report.py @@ -8,7 +8,6 @@ from typing import Optional from typing import TypedDict from sqlalchemy import 
ForeignKey -from sqlalchemy.orm import deferred from sqlalchemy.orm import relationship from spiffworkflow_backend.exceptions.process_entity_not_found_error import ( @@ -69,7 +68,7 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel): id: int = db.Column(db.Integer, primary_key=True) identifier: str = db.Column(db.String(50), nullable=False, index=True) - report_metadata: dict = deferred(db.Column(db.JSON)) # type: ignore + report_metadata: dict = db.Column(db.JSON) created_by_id = db.Column(ForeignKey(UserModel.id), nullable=False, index=True) # type: ignore created_by = relationship("UserModel") created_at_in_seconds = db.Column(db.Integer) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py index a1edd259..bc2fcff3 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py @@ -47,6 +47,7 @@ class MultiInstanceType(enum.Enum): @dataclass class TaskModel(SpiffworkflowBaseDBModel): __tablename__ = "task" + __allow_unmapped__ = True id: int = db.Column(db.Integer, primary_key=True) guid: str = db.Column(db.String(36), nullable=False, unique=True) bpmn_process_id: int = db.Column(ForeignKey(BpmnProcessModel.id), nullable=False, index=True) # type: ignore diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py index 0f62a738..cbf25bb6 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py @@ -309,11 +309,14 @@ class ProcessInstanceReportService: ) -> list[dict]: """Add_metadata_columns_to_process_instance.""" results = [] - for process_instance in process_instance_sqlalchemy_rows: - process_instance_dict = process_instance["ProcessInstanceModel"].serialized + for process_instance_row in process_instance_sqlalchemy_rows: + process_instance_mapping = process_instance_row._mapping + process_instance_dict = process_instance_row[0].serialized for metadata_column in metadata_columns: if metadata_column["accessor"] not in process_instance_dict: - process_instance_dict[metadata_column["accessor"]] = process_instance[metadata_column["accessor"]] + process_instance_dict[metadata_column["accessor"]] = process_instance_mapping[ + metadata_column["accessor"] + ] results.append(process_instance_dict) return results diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py index 4d933418..a9ac757c 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py @@ -1,4 +1,3 @@ -import logging import time from typing import Callable from typing import Optional @@ -280,10 +279,6 @@ class WorkflowExecutionService: finally: self.execution_strategy.save() - spiff_logger = logging.getLogger("spiff") - for handler in spiff_logger.handlers: - if hasattr(handler, "bulk_insert_logs"): - handler.bulk_insert_logs() # type: ignore db.session.commit() if save: From 438d44592fe2e8e43d50daded89fb7421bc5ac15 Mon Sep 17 00:00:00 2001 From: jasquat Date: Tue, 28 Mar 2023 15:56:57 -0400 Subject: [PATCH 
122/162] run safety in ci w/ burnettk --- .github/workflows/backend_tests.yml | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/.github/workflows/backend_tests.yml b/.github/workflows/backend_tests.yml index c09da5e5..517f6297 100644 --- a/.github/workflows/backend_tests.yml +++ b/.github/workflows/backend_tests.yml @@ -16,10 +16,7 @@ jobs: fail-fast: false matrix: include: - # FIXME: https://github.com/mysql/mysql-connector-python/pull/86 - # put back when poetry update protobuf mysql-connector-python updates protobuf - # right now mysql is forcing protobuf to version 3 - # - { python: "3.11", os: "ubuntu-latest", session: "safety" } + - { python: "3.11", os: "ubuntu-latest", session: "safety" } - { python: "3.11", os: "ubuntu-latest", session: "mypy" } - { python: "3.10", os: "ubuntu-latest", session: "mypy" } - { python: "3.9", os: "ubuntu-latest", session: "mypy" } From 422947424fdc5dfede7d7f6469beca2685946534 Mon Sep 17 00:00:00 2001 From: jasquat Date: Tue, 28 Mar 2023 16:05:43 -0400 Subject: [PATCH 123/162] add back in sqlalchemy stubs w/ burnettk --- spiffworkflow-backend/poetry.lock | 22 +++++++++++++++++++--- spiffworkflow-backend/pyproject.toml | 1 + 2 files changed, 20 insertions(+), 3 deletions(-) diff --git a/spiffworkflow-backend/poetry.lock b/spiffworkflow-backend/poetry.lock index 7181d042..d814b480 100644 --- a/spiffworkflow-backend/poetry.lock +++ b/spiffworkflow-backend/poetry.lock @@ -1095,7 +1095,7 @@ python-versions = "*" name = "mypy" version = "1.1.1" description = "Optional static typing for Python" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" @@ -1114,7 +1114,7 @@ reports = ["lxml"] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." 
-category = "dev" +category = "main" optional = false python-versions = ">=3.5" @@ -1908,6 +1908,18 @@ postgresql-psycopg2cffi = ["psycopg2cffi"] pymysql = ["pymysql"] sqlcipher = ["sqlcipher3-binary"] +[[package]] +name = "sqlalchemy-stubs" +version = "0.4" +description = "SQLAlchemy stubs and mypy plugin" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +mypy = ">=0.790" +typing-extensions = ">=3.7.4" + [[package]] name = "stevedore" version = "5.0.0" @@ -2235,7 +2247,7 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more [metadata] lock-version = "1.1" python-versions = ">=3.9,<3.12" -content-hash = "9dfdc4d01b78fbbed6cbb827806df1202840c8b45b957b724a58216183090d94" +content-hash = "64eb6f0dce231c627af5b4a0a3940e199c67afd93b880c85d622336ab466be5c" [metadata.files] alabaster = [ @@ -3629,6 +3641,10 @@ sqlalchemy = [ {file = "SQLAlchemy-2.0.7-py3-none-any.whl", hash = "sha256:fc67667c8e8c04e5c3250ab2cd51df40bc7c28c7c253d0475b377eff86fe4bb0"}, {file = "SQLAlchemy-2.0.7.tar.gz", hash = "sha256:a4c1e1582492c66dfacc9eab52738f3e64d9a2a380e412668f75aa06e540f649"}, ] +sqlalchemy-stubs = [ + {file = "sqlalchemy-stubs-0.4.tar.gz", hash = "sha256:c665d6dd4482ef642f01027fa06c3d5e91befabb219dc71fc2a09e7d7695f7ae"}, + {file = "sqlalchemy_stubs-0.4-py3-none-any.whl", hash = "sha256:5eec7aa110adf9b957b631799a72fef396b23ff99fe296df726645d01e312aa5"}, +] stevedore = [ {file = "stevedore-5.0.0-py3-none-any.whl", hash = "sha256:bd5a71ff5e5e5f5ea983880e4a1dd1bb47f8feebbb3d95b592398e2f02194771"}, {file = "stevedore-5.0.0.tar.gz", hash = "sha256:2c428d2338976279e8eb2196f7a94910960d9f7ba2f41f3988511e95ca447021"}, diff --git a/spiffworkflow-backend/pyproject.toml b/spiffworkflow-backend/pyproject.toml index 4504b729..30eb77a5 100644 --- a/spiffworkflow-backend/pyproject.toml +++ b/spiffworkflow-backend/pyproject.toml @@ -83,6 +83,7 @@ flask-simple-crypt = "^0.3.3" cryptography = "^39.0.2" safety = "^2.3.5" sqlalchemy = "^2.0.7" +sqlalchemy-stubs = "^0.4" [tool.poetry.dev-dependencies] From 6b3c93a075dd959b249562086e986ec810aa27bf Mon Sep 17 00:00:00 2001 From: jasquat Date: Tue, 28 Mar 2023 16:12:27 -0400 Subject: [PATCH 124/162] remove unnecessary libs from pyproject in root of arena w/ burnettk --- poetry.lock | 2916 +---------------- pyproject.toml | 131 - .../routes/process_instances_controller.py | 7 +- .../services/process_instance_processor.py | 17 +- .../services/workflow_execution_service.py | 17 +- 5 files changed, 142 insertions(+), 2946 deletions(-) diff --git a/poetry.lock b/poetry.lock index b71632be..c955b56f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,110 +1,17 @@ -[[package]] -name = "alabaster" -version = "0.7.12" -description = "A configurable sidebar-enabled Sphinx theme" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "alembic" -version = "1.8.1" -description = "A database migration tool for SQLAlchemy." -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -Mako = "*" -SQLAlchemy = ">=1.3.0" - -[package.extras] -tz = ["python-dateutil"] - -[[package]] -name = "amqp" -version = "5.1.1" -description = "Low-level AMQP client for Python (fork of amqplib)." -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -vine = ">=5.0.0" - -[[package]] -name = "aniso8601" -version = "9.0.1" -description = "A library for parsing ISO 8601 strings." 
-category = "main" -optional = false -python-versions = "*" - -[package.extras] -dev = ["black", "coverage", "isort", "pre-commit", "pyenchant", "pylint"] - -[[package]] -name = "apscheduler" -version = "3.10.0" -description = "In-process task scheduler with Cron-like capabilities" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -pytz = "*" -setuptools = ">=0.7" -six = ">=1.4.0" -tzlocal = ">=2.0,<3.0.0 || >=4.0.0" - -[package.extras] -doc = ["sphinx", "sphinx-rtd-theme"] -gevent = ["gevent"] -mongodb = ["pymongo (>=3.0)"] -redis = ["redis (>=3.0)"] -rethinkdb = ["rethinkdb (>=2.4.0)"] -sqlalchemy = ["sqlalchemy (>=1.4)"] -testing = ["pytest", "pytest-asyncio", "pytest-cov", "pytest-tornado5"] -tornado = ["tornado (>=4.3)"] -twisted = ["twisted"] -zookeeper = ["kazoo"] - -[[package]] -name = "astroid" -version = "2.12.12" -description = "An abstract syntax tree for Python with inference support." -category = "main" -optional = false -python-versions = ">=3.7.2" - -[package.dependencies] -lazy-object-proxy = ">=1.4.0" -wrapt = {version = ">=1.14,<2", markers = "python_version >= \"3.11\""} - [[package]] name = "attrs" -version = "22.1.0" +version = "22.2.0" description = "Classes Without Boilerplate" -category = "main" -optional = false -python-versions = ">=3.5" - -[package.extras] -dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] -docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] -tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] -tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] - -[[package]] -name = "babel" -version = "2.10.3" -description = "Internationalization utilities" -category = "main" +category = "dev" optional = false python-versions = ">=3.6" -[package.dependencies] -pytz = ">=2015.7" +[package.extras] +cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] +tests = ["attrs[tests-no-zope]", "zope.interface"] +tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] [[package]] name = "bandit" @@ -125,44 +32,9 @@ test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", toml = ["toml"] yaml = ["PyYAML"] -[[package]] -name = "bcrypt" -version = "4.0.1" -description = "Modern password hashing for your software and your servers" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.extras] -tests = ["pytest (>=3.2.1,!=3.3.0)"] -typecheck = ["mypy"] - -[[package]] -name = "beautifulsoup4" -version = "4.11.1" -description = "Screen-scraping library" -category = "dev" -optional = false -python-versions = ">=3.6.0" - -[package.dependencies] -soupsieve = ">1.2" - -[package.extras] -html5lib = ["html5lib"] -lxml = ["lxml"] - -[[package]] -name = "billiard" -version = "3.6.4.0" -description = "Python 
multiprocessing fork with improvements and bugfixes" -category = "main" -optional = false -python-versions = "*" - [[package]] name = "black" -version = "23.1a1" +version = "23.1.0" description = "The uncompromising code formatter." category = "dev" optional = false @@ -171,6 +43,7 @@ python-versions = ">=3.7" [package.dependencies] click = ">=8.0.0" mypy-extensions = ">=0.4.3" +packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" @@ -180,73 +53,6 @@ d = ["aiohttp (>=3.7.4)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] -[[package]] -name = "blinker" -version = "1.5" -description = "Fast, simple object-to-object and broadcast signaling" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[[package]] -name = "celery" -version = "5.2.7" -description = "Distributed Task Queue." -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -billiard = ">=3.6.4.0,<4.0" -click = ">=8.0.3,<9.0" -click-didyoumean = ">=0.0.3" -click-plugins = ">=1.1.1" -click-repl = ">=0.2.0" -kombu = ">=5.2.3,<6.0" -pytz = ">=2021.3" -vine = ">=5.0.0,<6.0" - -[package.extras] -arangodb = ["pyArango (>=1.3.2)"] -auth = ["cryptography"] -azureblockblob = ["azure-storage-blob (==12.9.0)"] -brotli = ["brotli (>=1.0.0)", "brotlipy (>=0.7.0)"] -cassandra = ["cassandra-driver (<3.21.0)"] -consul = ["python-consul2"] -cosmosdbsql = ["pydocumentdb (==2.3.2)"] -couchbase = ["couchbase (>=3.0.0)"] -couchdb = ["pycouchdb"] -django = ["Django (>=1.11)"] -dynamodb = ["boto3 (>=1.9.178)"] -elasticsearch = ["elasticsearch"] -eventlet = ["eventlet (>=0.32.0)"] -gevent = ["gevent (>=1.5.0)"] -librabbitmq = ["librabbitmq (>=1.5.0)"] -memcache = ["pylibmc"] -mongodb = ["pymongo[srv] (>=3.11.1)"] -msgpack = ["msgpack"] -pymemcache = ["python-memcached"] -pyro = ["pyro4"] -pytest = ["pytest-celery"] -redis = ["redis (>=3.4.1,!=4.0.0,!=4.0.1)"] -s3 = ["boto3 (>=1.9.125)"] -slmq = ["softlayer-messaging (>=1.0.3)"] -solar = ["ephem"] -sqlalchemy = ["sqlalchemy"] -sqs = ["kombu[sqs]"] -tblib = ["tblib (>=1.3.0)", "tblib (>=1.5.0)"] -yaml = ["PyYAML (>=3.10)"] -zookeeper = ["kazoo (>=1.3.1)"] -zstd = ["zstandard"] - -[[package]] -name = "certifi" -version = "2022.9.24" -description = "Python package for providing Mozilla's CA Bundle." -category = "main" -optional = false -python-versions = ">=3.6" - [[package]] name = "cfgv" version = "3.3.1" @@ -255,152 +61,25 @@ category = "dev" optional = false python-versions = ">=3.6.1" -[[package]] -name = "charset-normalizer" -version = "2.1.1" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "main" -optional = false -python-versions = ">=3.6.0" - -[package.extras] -unicode-backport = ["unicodedata2"] - -[[package]] -name = "classify-imports" -version = "4.2.0" -description = "Utilities for refactoring imports in python-like syntax." 
-category = "dev" -optional = false -python-versions = ">=3.7" - [[package]] name = "click" version = "8.1.3" description = "Composable command line interface toolkit" -category = "main" +category = "dev" optional = false python-versions = ">=3.7" [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} -[[package]] -name = "click-didyoumean" -version = "0.3.0" -description = "Enables git-like *did-you-mean* feature in click" -category = "main" -optional = false -python-versions = ">=3.6.2,<4.0.0" - -[package.dependencies] -click = ">=7" - -[[package]] -name = "click-plugins" -version = "1.1.1" -description = "An extension module for click to enable registering CLI commands via setuptools entry-points." -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -click = ">=4.0" - -[package.extras] -dev = ["coveralls", "pytest (>=3.6)", "pytest-cov", "wheel"] - -[[package]] -name = "click-repl" -version = "0.2.0" -description = "REPL plugin for Click" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -click = "*" -prompt-toolkit = "*" -six = "*" - -[[package]] -name = "clickclick" -version = "20.10.2" -description = "Click utility functions" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -click = ">=4.0" -PyYAML = ">=3.11" - [[package]] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." -category = "main" +category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -[[package]] -name = "configparser" -version = "5.3.0" -description = "Updated configparser from stdlib for earlier Pythons." -category = "main" -optional = false -python-versions = ">=3.7" - -[package.extras] -docs = ["jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx"] -testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "types-backports"] - -[[package]] -name = "connexion" -version = "2.14.1" -description = "Connexion - API first applications with OpenAPI/Swagger and Flask" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -clickclick = ">=1.2,<21" -flask = ">=1.0.4,<3" -inflection = ">=0.3.1,<0.6" -itsdangerous = ">=0.24" -jsonschema = ">=2.5.1,<5" -packaging = ">=20" -PyYAML = ">=5.1,<7" -requests = ">=2.9.1,<3" -swagger-ui-bundle = {version = ">=0.0.2,<0.1", optional = true, markers = "extra == \"swagger-ui\""} -werkzeug = ">=1.0,<3" - -[package.extras] -aiohttp = ["MarkupSafe (>=0.23)", "aiohttp (>=2.3.10,<4)", "aiohttp-jinja2 (>=0.14.0,<2)"] -docs = ["sphinx-autoapi (==1.8.1)"] -flask = ["flask (>=1.0.4,<3)", "itsdangerous (>=0.24)"] -swagger-ui = ["swagger-ui-bundle (>=0.0.2,<0.1)"] -tests = ["MarkupSafe (>=0.23)", "aiohttp (>=2.3.10,<4)", "aiohttp-jinja2 (>=0.14.0,<2)", "aiohttp-remotes", "decorator (>=5,<6)", "flask (>=1.0.4,<3)", "itsdangerous (>=0.24)", "pytest (>=6,<7)", "pytest-aiohttp", "pytest-cov (>=2,<3)", "swagger-ui-bundle (>=0.0.2,<0.1)", "testfixtures (>=6,<7)"] - -[[package]] -name = "coverage" -version = "6.5.0" -description = "Code coverage measurement for Python" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.extras] -toml = ["tomli"] - -[[package]] -name = "darglint" -version = "1.8.1" -description = "A utility for ensuring Google-style docstrings 
stay up to date with the source code." -category = "dev" -optional = false -python-versions = ">=3.6,<4.0" - [[package]] name = "distlib" version = "0.3.6" @@ -413,52 +92,21 @@ python-versions = "*" name = "docutils" version = "0.19" description = "Docutils -- Python Documentation Utilities" -category = "main" +category = "dev" optional = false python-versions = ">=3.7" -[[package]] -name = "dparse" -version = "0.6.2" -description = "A parser for Python dependency files" -category = "dev" -optional = false -python-versions = ">=3.5" - -[package.dependencies] -packaging = "*" -toml = "*" - -[package.extras] -conda = ["pyyaml"] -pipenv = ["pipenv"] - -[[package]] -name = "ecdsa" -version = "0.18.0" -description = "ECDSA cryptographic signature library (pure python)" -category = "main" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" - -[package.dependencies] -six = ">=1.9.0" - -[package.extras] -gmpy = ["gmpy"] -gmpy2 = ["gmpy2"] - [[package]] name = "filelock" -version = "3.8.0" +version = "3.10.7" description = "A platform independent file lock." category = "dev" optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo (>=2022.6.21)", "sphinx (>=5.1.1)", "sphinx-autodoc-typehints (>=1.19.1)"] -testing = ["covdefaults (>=2.2)", "coverage (>=6.4.2)", "pytest (>=7.1.2)", "pytest-cov (>=3)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2022.12.7)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.2.2)", "diff-cover (>=7.5)", "pytest (>=7.2.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] [[package]] name = "flake8" @@ -489,7 +137,7 @@ pycodestyle = "*" [[package]] name = "flake8-bugbear" -version = "22.10.27" +version = "22.12.6" description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." category = "dev" optional = false @@ -504,11 +152,11 @@ dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "tox"] [[package]] name = "flake8-docstrings" -version = "1.6.0" +version = "1.7.0" description = "Extension for flake8 which uses pydocstyle to check docstrings" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.7" [package.dependencies] flake8 = ">=3" @@ -538,198 +186,21 @@ flake8 = ">=3.0.0" pygments = "*" restructuredtext-lint = "*" -[[package]] -name = "flask" -version = "2.2.2" -description = "A simple framework for building complex web applications." -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -click = ">=8.0" -itsdangerous = ">=2.0" -Jinja2 = ">=3.0" -Werkzeug = ">=2.2.2" - -[package.extras] -async = ["asgiref (>=3.2)"] -dotenv = ["python-dotenv"] - -[[package]] -name = "flask-admin" -version = "1.6.0" -description = "Simple and extensible admin interface framework for Flask" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -Flask = ">=0.7" -wtforms = "*" - -[package.extras] -aws = ["boto"] -azure = ["azure-storage-blob"] - -[[package]] -name = "flask-bcrypt" -version = "1.0.1" -description = "Brcrypt hashing for Flask." 
-category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -bcrypt = ">=3.1.1" -Flask = "*" - -[[package]] -name = "flask-bpmn" -version = "0.0.0" -description = "Flask Bpmn" -category = "main" -optional = false -python-versions = "^3.7" -develop = false - -[package.dependencies] -click = "^8.0.1" -flask = "*" -flask-admin = "*" -flask-bcrypt = "*" -flask-cors = "*" -flask-mail = "*" -flask-marshmallow = "*" -flask-migrate = "*" -flask-restful = "*" -greenlet = "^2.0.1" -sentry-sdk = "*" -sphinx-autoapi = "^2.0.0" -spiffworkflow = "*" -werkzeug = "*" - -[package.source] -type = "git" -url = "https://github.com/sartography/flask-bpmn" -reference = "main" -resolved_reference = "c18306300f4312b8d36e0197fd6b62399180d0b1" - -[[package]] -name = "flask-cors" -version = "3.0.10" -description = "A Flask extension adding a decorator for CORS support" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -Flask = ">=0.9" -Six = "*" - -[[package]] -name = "flask-mail" -version = "0.9.1" -description = "Flask extension for sending email" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -blinker = "*" -Flask = "*" - -[[package]] -name = "flask-marshmallow" -version = "0.14.0" -description = "Flask + marshmallow for beautiful APIs" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -Flask = "*" -marshmallow = ">=2.0.0" -six = ">=1.9.0" - -[package.extras] -dev = ["flake8 (==3.8.3)", "flake8-bugbear (==20.1.4)", "flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmallow-sqlalchemy (>=0.13.0,<0.19.0)", "mock", "pre-commit (>=2.4,<3.0)", "pytest", "tox"] -docs = ["Sphinx (==3.2.1)", "marshmallow-sqlalchemy (>=0.13.0)", "sphinx-issues (==1.2.0)"] -lint = ["flake8 (==3.8.3)", "flake8-bugbear (==20.1.4)", "pre-commit (>=2.4,<3.0)"] -sqlalchemy = ["flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmallow-sqlalchemy (>=0.13.0,<0.19.0)"] -tests = ["flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmallow-sqlalchemy (>=0.13.0,<0.19.0)", "mock", "pytest"] - -[[package]] -name = "flask-migrate" -version = "3.1.0" -description = "SQLAlchemy database migrations for Flask applications using Alembic." -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -alembic = ">=0.7" -Flask = ">=0.9" -Flask-SQLAlchemy = ">=1.0" - -[[package]] -name = "flask-restful" -version = "0.3.9" -description = "Simple framework for creating REST APIs" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -aniso8601 = ">=0.82" -Flask = ">=0.8" -pytz = "*" -six = ">=1.3.0" - -[package.extras] -docs = ["sphinx"] - -[[package]] -name = "flask-sqlalchemy" -version = "3.0.2" -description = "Add SQLAlchemy support to your Flask application." -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -Flask = ">=2.2" -SQLAlchemy = ">=1.4.18" - -[[package]] -name = "furo" -version = "2022.9.29" -description = "A clean customisable Sphinx documentation theme." 
-category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -beautifulsoup4 = "*" -pygments = ">=2.7" -sphinx = ">=4.0,<6.0" -sphinx-basic-ng = "*" - [[package]] name = "gitdb" -version = "4.0.9" +version = "4.0.10" description = "Git Object Database" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] smmap = ">=3.0.1,<6" [[package]] name = "gitpython" -version = "3.1.29" -description = "GitPython is a python library used to interact with Git repositories" +version = "3.1.31" +description = "GitPython is a Python library used to interact with Git repositories" category = "dev" optional = false python-versions = ">=3.7" @@ -737,38 +208,9 @@ python-versions = ">=3.7" [package.dependencies] gitdb = ">=4.0.1,<5" -[[package]] -name = "greenlet" -version = "2.0.1" -description = "Lightweight in-process concurrent programming" -category = "main" -optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" - -[package.extras] -docs = ["Sphinx", "docutils (<0.18)"] -test = ["faulthandler", "objgraph", "psutil"] - -[[package]] -name = "gunicorn" -version = "20.1.0" -description = "WSGI HTTP Server for UNIX" -category = "main" -optional = false -python-versions = ">=3.5" - -[package.dependencies] -setuptools = ">=3.0" - -[package.extras] -eventlet = ["eventlet (>=0.24.1)"] -gevent = ["gevent (>=1.4.0)"] -setproctitle = ["setproctitle"] -tornado = ["tornado (>=0.2)"] - [[package]] name = "identify" -version = "2.5.7" +version = "2.5.22" description = "File identification library for Python" category = "dev" optional = false @@ -777,209 +219,6 @@ python-versions = ">=3.7" [package.extras] license = ["ukkonen"] -[[package]] -name = "idna" -version = "3.4" -description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" -optional = false -python-versions = ">=3.5" - -[[package]] -name = "imagesize" -version = "1.4.1" -description = "Getting image size from png/jpeg/jpeg2000/gif file" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[[package]] -name = "inflection" -version = "0.5.1" -description = "A port of Ruby on Rails inflector to Python" -category = "main" -optional = false -python-versions = ">=3.5" - -[[package]] -name = "iniconfig" -version = "1.1.1" -description = "iniconfig: brain-dead simple config-ini parsing" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "itsdangerous" -version = "2.1.2" -description = "Safely pass data to untrusted environments and back." -category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "jinja2" -version = "3.1.2" -description = "A very fast and expressive template engine." 
-category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - -[[package]] -name = "jsonschema" -version = "4.16.0" -description = "An implementation of JSON Schema validation for Python" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -attrs = ">=17.4.0" -pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" - -[package.extras] -format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] -format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] - -[[package]] -name = "kombu" -version = "5.2.4" -description = "Messaging library for Python." -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -amqp = ">=5.0.9,<6.0.0" -vine = "*" - -[package.extras] -azureservicebus = ["azure-servicebus (>=7.0.0)"] -azurestoragequeues = ["azure-storage-queue"] -consul = ["python-consul (>=0.6.0)"] -librabbitmq = ["librabbitmq (>=2.0.0)"] -mongodb = ["pymongo (>=3.3.0,<3.12.1)"] -msgpack = ["msgpack"] -pyro = ["pyro4"] -qpid = ["qpid-python (>=0.26)", "qpid-tools (>=0.26)"] -redis = ["redis (>=3.4.1,!=4.0.0,!=4.0.1)"] -slmq = ["softlayer-messaging (>=1.0.3)"] -sqlalchemy = ["sqlalchemy"] -sqs = ["boto3 (>=1.9.12)", "pycurl (>=7.44.1,<7.45.0)", "urllib3 (>=1.26.7)"] -yaml = ["PyYAML (>=3.10)"] -zookeeper = ["kazoo (>=1.3.1)"] - -[[package]] -name = "lazy-object-proxy" -version = "1.8.0" -description = "A fast and thorough lazy object proxy." -category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "livereload" -version = "2.6.3" -description = "Python LiveReload is an awesome tool for web developers" -category = "dev" -optional = false -python-versions = "*" - -[package.dependencies] -six = "*" -tornado = {version = "*", markers = "python_version > \"2.7\""} - -[[package]] -name = "lxml" -version = "4.9.1" -description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" - -[package.extras] -cssselect = ["cssselect (>=0.7)"] -html5 = ["html5lib"] -htmlsoup = ["BeautifulSoup4"] -source = ["Cython (>=0.29.7)"] - -[[package]] -name = "mako" -version = "1.2.3" -description = "A super-fast templating language that borrows the best ideas from the existing templating languages." -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -MarkupSafe = ">=0.9.2" - -[package.extras] -babel = ["Babel"] -lingua = ["lingua"] -testing = ["pytest"] - -[[package]] -name = "markupsafe" -version = "2.1.1" -description = "Safely add untrusted strings to HTML/XML markup." -category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "marshmallow" -version = "3.18.0" -description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
-category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -packaging = ">=17.0" - -[package.extras] -dev = ["flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "mypy (==0.971)", "pre-commit (>=2.4,<3.0)", "pytest", "pytz", "simplejson", "tox"] -docs = ["alabaster (==0.7.12)", "autodocsumm (==0.2.9)", "sphinx (==5.1.1)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"] -lint = ["flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "mypy (==0.971)", "pre-commit (>=2.4,<3.0)"] -tests = ["pytest", "pytz", "simplejson"] - -[[package]] -name = "marshmallow-enum" -version = "1.5.1" -description = "Enum field for Marshmallow" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -marshmallow = ">=2.0.0" - -[[package]] -name = "marshmallow-sqlalchemy" -version = "0.28.1" -description = "SQLAlchemy integration with the marshmallow (de)serialization library" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -marshmallow = ">=3.0.0" -packaging = ">=21.3" -SQLAlchemy = ">=1.3.0" - -[package.extras] -dev = ["flake8 (==4.0.1)", "flake8-bugbear (==22.7.1)", "pre-commit (>=2.0,<3.0)", "pytest", "pytest-lazy-fixture (>=0.6.2)", "tox"] -docs = ["alabaster (==0.7.12)", "sphinx (==4.4.0)", "sphinx-issues (==3.0.1)"] -lint = ["flake8 (==4.0.1)", "flake8-bugbear (==22.7.1)", "pre-commit (>=2.0,<3.0)"] -tests = ["pytest", "pytest-lazy-fixture (>=0.6.2)"] - [[package]] name = "mccabe" version = "0.6.1" @@ -988,46 +227,13 @@ category = "dev" optional = false python-versions = "*" -[[package]] -name = "mypy" -version = "0.982" -description = "Optional static typing for Python" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -mypy-extensions = ">=0.4.3" -typing-extensions = ">=3.10" - -[package.extras] -dmypy = ["psutil (>=4.0)"] -python2 = ["typed-ast (>=1.4.0,<2)"] -reports = ["lxml"] - [[package]] name = "mypy-extensions" -version = "0.4.3" -description = "Experimental type system extensions for programs checked with the mypy typechecker." -category = "main" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +category = "dev" optional = false -python-versions = "*" - -[[package]] -name = "mysql-connector-python" -version = "8.0.31" -description = "MySQL driver written in Python" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -protobuf = ">=3.11.0,<=3.20.1" - -[package.extras] -compression = ["lz4 (>=2.1.6,<=3.1.3)", "zstandard (>=0.12.0,<=0.15.2)"] -dns-srv = ["dnspython (>=1.16.0,<=2.1.0)"] -gssapi = ["gssapi (>=1.6.9,<=1.8.1)"] +python-versions = ">=3.5" [[package]] name = "nodeenv" @@ -1042,18 +248,15 @@ setuptools = "*" [[package]] name = "packaging" -version = "21.3" +version = "23.0" description = "Core utilities for Python packages" -category = "main" +category = "dev" optional = false -python-versions = ">=3.6" - -[package.dependencies] -pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" +python-versions = ">=3.7" [[package]] name = "pathspec" -version = "0.10.1" +version = "0.11.1" description = "Utility library for gitignore style pattern matching of file paths." 
category = "dev" optional = false @@ -1061,50 +264,27 @@ python-versions = ">=3.7" [[package]] name = "pbr" -version = "5.11.0" +version = "5.11.1" description = "Python Build Reasonableness" category = "dev" optional = false python-versions = ">=2.6" -[[package]] -name = "pep8-naming" -version = "0.13.2" -description = "Check PEP-8 naming conventions, plugin for flake8" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -flake8 = ">=3.9.1" - [[package]] name = "platformdirs" -version = "2.5.2" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +version = "3.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "dev" optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx (>=4)", "sphinx-autodoc-typehints (>=1.12)"] -test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] - -[[package]] -name = "pluggy" -version = "1.0.0" -description = "plugin and hook calling mechanisms for python" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.2.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] [[package]] name = "pre-commit" -version = "2.20.0" +version = "2.21.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." category = "dev" optional = false @@ -1115,54 +295,7 @@ cfgv = ">=2.0.0" identify = ">=1.0.0" nodeenv = ">=0.11.1" pyyaml = ">=5.1" -toml = "*" -virtualenv = ">=20.0.8" - -[[package]] -name = "pre-commit-hooks" -version = "4.3.0" -description = "Some out-of-the-box hooks for pre-commit." 
-category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -"ruamel.yaml" = ">=0.15" - -[[package]] -name = "prompt-toolkit" -version = "3.0.31" -description = "Library for building powerful interactive command lines in Python" -category = "main" -optional = false -python-versions = ">=3.6.2" - -[package.dependencies] -wcwidth = "*" - -[[package]] -name = "protobuf" -version = "3.20.1" -description = "Protocol Buffers" -category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "psycopg2" -version = "2.9.5" -description = "psycopg2 - Python-PostgreSQL Database Adapter" -category = "main" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "pyasn1" -version = "0.4.8" -description = "ASN.1 types and codecs" -category = "main" -optional = false -python-versions = "*" +virtualenv = ">=20.10.0" [[package]] name = "pycodestyle" @@ -1174,17 +307,17 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "pydocstyle" -version = "6.1.1" +version = "6.3.0" description = "Python docstring style checker" category = "dev" optional = false python-versions = ">=3.6" [package.dependencies] -snowballstemmer = "*" +snowballstemmer = ">=2.2.0" [package.extras] -toml = ["toml"] +toml = ["tomli (>=1.2.3)"] [[package]] name = "pyflakes" @@ -1196,238 +329,22 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pygments" -version = "2.13.0" +version = "2.14.0" description = "Pygments is a syntax highlighting package written in Python." -category = "main" +category = "dev" optional = false python-versions = ">=3.6" [package.extras] plugins = ["importlib-metadata"] -[[package]] -name = "pyjwt" -version = "2.6.0" -description = "JSON Web Token implementation in Python" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.extras] -crypto = ["cryptography (>=3.4.0)"] -dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] - -[[package]] -name = "pyparsing" -version = "3.0.9" -description = "pyparsing module - Classes and methods to define and execute parsing grammars" -category = "main" -optional = false -python-versions = ">=3.6.8" - -[package.extras] -diagrams = ["jinja2", "railroad-diagrams"] - -[[package]] -name = "pyrsistent" -version = "0.18.1" -description = "Persistent/Functional/Immutable data structures" -category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "pytest" -version = "7.2.0" -description = "pytest: simple powerful testing with Python" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -attrs = ">=19.2.0" -colorama = {version = "*", markers = "sys_platform == \"win32\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" - -[package.extras] -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] - -[[package]] -name = "pytest-flask" -version = "1.2.0" -description = "A set of py.test fixtures to test Flask applications." 
-category = "main" -optional = false -python-versions = ">=3.5" - -[package.dependencies] -Flask = "*" -pytest = ">=5.2" -Werkzeug = ">=0.7" - -[package.extras] -docs = ["Sphinx", "sphinx-rtd-theme"] - -[[package]] -name = "pytest-flask-sqlalchemy" -version = "1.1.0" -description = "A pytest plugin for preserving test isolation in Flask-SQlAlchemy using database transactions." -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -Flask-SQLAlchemy = ">=2.3" -packaging = ">=14.1" -pytest = ">=3.2.1" -pytest-mock = ">=1.6.2" -SQLAlchemy = ">=1.2.2" - -[package.extras] -tests = ["psycopg2-binary", "pytest (>=6.0.1)", "pytest-postgresql (>=2.4.0,<4.0.0)"] - -[[package]] -name = "pytest-mock" -version = "3.10.0" -description = "Thin-wrapper around the mock package for easier use with pytest" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -pytest = ">=5.0" - -[package.extras] -dev = ["pre-commit", "pytest-asyncio", "tox"] - -[[package]] -name = "python-jose" -version = "3.3.0" -description = "JOSE implementation in Python" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -ecdsa = "!=0.15" -pyasn1 = "*" -rsa = "*" - -[package.extras] -cryptography = ["cryptography (>=3.4.0)"] -pycrypto = ["pyasn1", "pycrypto (>=2.6.0,<2.7.0)"] -pycryptodome = ["pyasn1", "pycryptodome (>=3.3.1,<4.0.0)"] - -[[package]] -name = "python-keycloak" -version = "2.6.0" -description = "python-keycloak is a Python package providing access to the Keycloak API." -category = "main" -optional = false -python-versions = ">=3.7,<4.0" - -[package.dependencies] -python-jose = ">=3.3.0,<4.0.0" -requests = ">=2.20.0,<3.0.0" -requests-toolbelt = ">=0.9.1,<0.10.0" -urllib3 = ">=1.26.0,<2.0.0" - -[package.extras] -docs = ["Sphinx (>=5.0.2,<6.0.0)", "alabaster (>=0.7.12,<0.8.0)", "commonmark (>=0.9.1,<0.10.0)", "m2r2 (>=0.3.2,<0.4.0)", "mock (>=4.0.3,<5.0.0)", "readthedocs-sphinx-ext (>=2.1.8,<3.0.0)", "recommonmark (>=0.7.1,<0.8.0)", "sphinx-autoapi (>=1.8.4,<2.0.0)", "sphinx-rtd-theme (>=1.0.0,<2.0.0)"] - -[[package]] -name = "pytz" -version = "2022.5" -description = "World timezone definitions, modern and historical" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "pytz-deprecation-shim" -version = "0.1.0.post0" -description = "Shims to make deprecation of pytz easier" -category = "main" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" - -[package.dependencies] -tzdata = {version = "*", markers = "python_version >= \"3.6\""} - -[[package]] -name = "pyupgrade" -version = "3.1.0" -description = "A tool to automatically upgrade syntax for newer versions." -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -tokenize-rt = ">=3.2.0" - [[package]] name = "pyyaml" version = "6.0" description = "YAML parser and emitter for Python" -category = "main" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "reorder-python-imports" -version = "3.9.0" -description = "Tool for reordering python imports" category = "dev" optional = false -python-versions = ">=3.7" - -[package.dependencies] -classify-imports = ">=4.1" - -[[package]] -name = "requests" -version = "2.28.1" -description = "Python HTTP for Humans." 
-category = "main" -optional = false -python-versions = ">=3.7, <4" - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<3" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<1.27" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "requests-toolbelt" -version = "0.9.1" -description = "A utility belt for advanced users of python-requests" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -requests = ">=2.0.1,<3.0.0" - -[[package]] -name = "restrictedpython" -version = "6.0" -description = "RestrictedPython is a defined subset of the Python language which allows to provide a program input into a trusted environment." -category = "main" -optional = false -python-versions = ">=3.6, <3.12" - -[package.extras] -docs = ["Sphinx", "sphinx-rtd-theme"] -test = ["pytest", "pytest-mock"] +python-versions = ">=3.6" [[package]] name = "restructuredtext-lint" @@ -1440,110 +357,19 @@ python-versions = "*" [package.dependencies] docutils = ">=0.11,<1.0" -[[package]] -name = "rsa" -version = "4.9" -description = "Pure-Python RSA implementation" -category = "main" -optional = false -python-versions = ">=3.6,<4" - -[package.dependencies] -pyasn1 = ">=0.1.3" - -[[package]] -name = "ruamel-yaml" -version = "0.17.21" -description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" -category = "dev" -optional = false -python-versions = ">=3" - -[package.extras] -docs = ["ryd"] -jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] - -[[package]] -name = "safety" -version = "2.3.1" -description = "Checks installed dependencies for known vulnerabilities and licenses." -category = "dev" -optional = false -python-versions = "*" - -[package.dependencies] -Click = ">=8.0.2" -dparse = ">=0.6.2" -packaging = ">=21.0" -requests = "*" -"ruamel.yaml" = ">=0.17.21" -setuptools = ">=19.3" - -[package.extras] -github = ["jinja2 (>=3.1.0)", "pygithub (>=1.43.3)"] -gitlab = ["python-gitlab (>=1.3.0)"] - -[[package]] -name = "sentry-sdk" -version = "1.10.1" -description = "Python client for Sentry (https://sentry.io)" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -certifi = "*" -urllib3 = {version = ">=1.26.11", markers = "python_version >= \"3.6\""} - -[package.extras] -aiohttp = ["aiohttp (>=3.5)"] -beam = ["apache-beam (>=2.12)"] -bottle = ["bottle (>=0.12.13)"] -celery = ["celery (>=3)"] -chalice = ["chalice (>=1.16.0)"] -django = ["django (>=1.8)"] -falcon = ["falcon (>=1.4)"] -fastapi = ["fastapi (>=0.79.0)"] -flask = ["blinker (>=1.1)", "flask (>=0.11)"] -httpx = ["httpx (>=0.16.0)"] -pure-eval = ["asttokens", "executing", "pure-eval"] -pyspark = ["pyspark (>=2.4.4)"] -quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] -rq = ["rq (>=0.6)"] -sanic = ["sanic (>=0.8)"] -sqlalchemy = ["sqlalchemy (>=1.2)"] -starlette = ["starlette (>=0.19.1)"] -tornado = ["tornado (>=5)"] - [[package]] name = "setuptools" -version = "65.5.0" +version = "67.6.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "main" +category = "dev" optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", 
"sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mock", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] -[[package]] -name = "simplejson" -version = "3.17.6" -description = "Simple, fast, extensible JSON encoder/decoder for Python" -category = "main" -optional = false -python-versions = ">=2.5, !=3.0.*, !=3.1.*, !=3.2.*" - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" - [[package]] name = "smmap" version = "5.0.0" @@ -1556,257 +382,13 @@ python-versions = ">=3.6" name = "snowballstemmer" version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." -category = "main" +category = "dev" optional = false python-versions = "*" -[[package]] -name = "soupsieve" -version = "2.3.2.post1" -description = "A modern CSS selector implementation for Beautiful Soup." 
-category = "dev" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "sphinx" -version = "5.3.0" -description = "Python documentation generator" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -alabaster = ">=0.7,<0.8" -babel = ">=2.9" -colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} -docutils = ">=0.14,<0.20" -imagesize = ">=1.3" -Jinja2 = ">=3.0" -packaging = ">=21.0" -Pygments = ">=2.12" -requests = ">=2.5.0" -snowballstemmer = ">=2.0" -sphinxcontrib-applehelp = "*" -sphinxcontrib-devhelp = "*" -sphinxcontrib-htmlhelp = ">=2.0.0" -sphinxcontrib-jsmath = "*" -sphinxcontrib-qthelp = "*" -sphinxcontrib-serializinghtml = ">=1.1.5" - -[package.extras] -docs = ["sphinxcontrib-websupport"] -lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-bugbear", "flake8-comprehensions", "flake8-simplify", "isort", "mypy (>=0.981)", "sphinx-lint", "types-requests", "types-typed-ast"] -test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"] - -[[package]] -name = "sphinx-autoapi" -version = "2.0.0" -description = "Sphinx API documentation generator" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -astroid = ">=2.7" -Jinja2 = "*" -PyYAML = "*" -sphinx = ">=4.0" -unidecode = "*" - -[package.extras] -docs = ["sphinx", "sphinx-rtd-theme"] -dotnet = ["sphinxcontrib-dotnetdomain"] -go = ["sphinxcontrib-golangdomain"] - -[[package]] -name = "sphinx-autobuild" -version = "2021.3.14" -description = "Rebuild Sphinx documentation on changes, with live-reload in the browser." -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -colorama = "*" -livereload = "*" -sphinx = "*" - -[package.extras] -test = ["pytest", "pytest-cov"] - -[[package]] -name = "sphinx-basic-ng" -version = "1.0.0b1" -description = "A modern skeleton for Sphinx themes." -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -sphinx = ">=4.0" - -[package.extras] -docs = ["furo", "ipython", "myst-parser", "sphinx-copybutton", "sphinx-inline-tabs"] - -[[package]] -name = "sphinx-click" -version = "4.4.0" -description = "Sphinx extension that automatically documents click applications" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -click = ">=7.0" -docutils = "*" -sphinx = ">=2.0" - -[[package]] -name = "sphinxcontrib-applehelp" -version = "1.0.2" -description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" -category = "main" -optional = false -python-versions = ">=3.5" - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["pytest"] - -[[package]] -name = "sphinxcontrib-devhelp" -version = "1.0.2" -description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." 
-category = "main" -optional = false -python-versions = ">=3.5" - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["pytest"] - -[[package]] -name = "sphinxcontrib-htmlhelp" -version = "2.0.0" -description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["html5lib", "pytest"] - -[[package]] -name = "sphinxcontrib-jsmath" -version = "1.0.1" -description = "A sphinx extension which renders display math in HTML via JavaScript" -category = "main" -optional = false -python-versions = ">=3.5" - -[package.extras] -test = ["flake8", "mypy", "pytest"] - -[[package]] -name = "sphinxcontrib-qthelp" -version = "1.0.3" -description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." -category = "main" -optional = false -python-versions = ">=3.5" - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["pytest"] - -[[package]] -name = "sphinxcontrib-serializinghtml" -version = "1.1.5" -description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." -category = "main" -optional = false -python-versions = ">=3.5" - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["pytest"] - -[[package]] -name = "SpiffWorkflow" -version = "1.2.1" -description = "A workflow framework and BPMN/DMN Processor" -category = "main" -optional = false -python-versions = "*" -develop = false - -[package.dependencies] -celery = "*" -configparser = "*" -lxml = "*" - -[package.source] -type = "git" -url = "https://github.com/sartography/SpiffWorkflow" -reference = "main" -resolved_reference = "f162aac43af3af18d1a55186aeccea154fb8b05d" - -[[package]] -name = "sqlalchemy" -version = "1.4.42" -description = "Database Abstraction Library" -category = "main" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" - -[package.dependencies] -greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} - -[package.extras] -aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] -asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"] -mssql = ["pyodbc"] -mssql-pymssql = ["pymssql"] -mssql-pyodbc = ["pyodbc"] -mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] -mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] -mysql-connector = ["mysql-connector-python"] -oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"] -postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] -postgresql-psycopg2binary = ["psycopg2-binary"] -postgresql-psycopg2cffi = ["psycopg2cffi"] -pymysql = ["pymysql", "pymysql (<1)"] -sqlcipher = ["sqlcipher3_binary"] - -[[package]] -name = "sqlalchemy-stubs" -version = "0.4" -description = "" -category = "main" -optional = false -python-versions = "*" -develop = false - -[package.dependencies] -mypy = ">=0.790" -typing-extensions = ">=3.7.4" - -[package.source] -type = "git" -url = 
"https://github.com/burnettk/sqlalchemy-stubs.git" -reference = "scoped-session-delete" -resolved_reference = "d1176931684ce5b327539cc9567d4a1cd8ef1efd" - [[package]] name = "stevedore" -version = "4.1.0" +version = "5.0.0" description = "Manage dynamic plugins for Python applications" category = "dev" optional = false @@ -1815,489 +397,76 @@ python-versions = ">=3.8" [package.dependencies] pbr = ">=2.0.0,<2.1.0 || >2.1.0" -[[package]] -name = "swagger-ui-bundle" -version = "0.0.9" -description = "swagger_ui_bundle - swagger-ui files in a pip package" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -Jinja2 = ">=2.0" - -[[package]] -name = "tokenize-rt" -version = "5.0.0" -description = "A wrapper around the stdlib `tokenize` which roundtrips." -category = "dev" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" -category = "dev" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -category = "dev" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "tornado" -version = "6.2" -description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." -category = "dev" -optional = false -python-versions = ">= 3.7" - -[[package]] -name = "typeguard" -version = "2.13.3" -description = "Run-time type checker for Python" -category = "dev" -optional = false -python-versions = ">=3.5.3" - -[package.extras] -doc = ["sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["mypy", "pytest", "typing-extensions"] - -[[package]] -name = "types-click" -version = "7.1.8" -description = "Typing stubs for click" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "types-flask" -version = "1.1.6" -description = "Typing stubs for Flask" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -types-click = "*" -types-Jinja2 = "*" -types-Werkzeug = "*" - -[[package]] -name = "types-jinja2" -version = "2.11.9" -description = "Typing stubs for Jinja2" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -types-MarkupSafe = "*" - -[[package]] -name = "types-markupsafe" -version = "1.1.10" -description = "Typing stubs for MarkupSafe" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "types-pytz" -version = "2022.5.0.0" -description = "Typing stubs for pytz" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "types-pyyaml" -version = "6.0.12.1" -description = "Typing stubs for PyYAML" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "types-requests" -version = "2.28.11.2" -description = "Typing stubs for requests" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -types-urllib3 = "<1.27" - -[[package]] -name = "types-urllib3" -version = "1.26.25.1" -description = "Typing stubs for urllib3" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "types-werkzeug" -version = "1.0.9" -description = "Typing stubs for Werkzeug" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "typing-extensions" -version = "4.4.0" -description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" -optional 
= false -python-versions = ">=3.7" - -[[package]] -name = "tzdata" -version = "2022.5" -description = "Provider of IANA time zone data" -category = "main" -optional = false -python-versions = ">=2" - -[[package]] -name = "tzlocal" -version = "4.2" -description = "tzinfo object for the local timezone" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -pytz-deprecation-shim = "*" -tzdata = {version = "*", markers = "platform_system == \"Windows\""} - -[package.extras] -devenv = ["black", "pyroma", "pytest-cov", "zest.releaser"] -test = ["pytest (>=4.3)", "pytest-mock (>=3.3)"] - -[[package]] -name = "unidecode" -version = "1.3.6" -description = "ASCII transliterations of Unicode text" -category = "main" -optional = false -python-versions = ">=3.5" - -[[package]] -name = "urllib3" -version = "1.26.12" -description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] - -[[package]] -name = "vine" -version = "5.0.0" -description = "Promises, promises, promises." -category = "main" -optional = false -python-versions = ">=3.6" - [[package]] name = "virtualenv" -version = "20.16.6" +version = "20.21.0" description = "Virtual Python Environment builder" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] distlib = ">=0.3.6,<1" filelock = ">=3.4.1,<4" -platformdirs = ">=2.4,<3" +platformdirs = ">=2.4,<4" [package.extras] -docs = ["proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-argparse (>=0.3.2)", "sphinx-rtd-theme (>=1)", "towncrier (>=22.8)"] -testing = ["coverage (>=6.2)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=21.3)", "pytest (>=7.0.1)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.6.1)", "pytest-randomly (>=3.10.3)", "pytest-timeout (>=2.1)"] - -[[package]] -name = "wcwidth" -version = "0.2.5" -description = "Measures the displayed width of unicode strings in a terminal" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "werkzeug" -version = "2.2.2" -description = "The comprehensive WSGI web application library." -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -MarkupSafe = ">=2.1.1" - -[package.extras] -watchdog = ["watchdog"] - -[[package]] -name = "wrapt" -version = "1.14.1" -description = "Module for decorators, wrappers and monkey patching." -category = "main" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" - -[[package]] -name = "wtforms" -version = "3.0.1" -description = "Form validation and rendering for Python web development." 
-category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -MarkupSafe = "*" - -[package.extras] -email = ["email-validator"] - -[[package]] -name = "xdoctest" -version = "1.1.0" -description = "A rewrite of the builtin doctest module" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -colorama = {version = "*", optional = true, markers = "platform_system == \"Windows\" and extra == \"colors\""} -Pygments = {version = "*", optional = true, markers = "python_version >= \"3.5.0\" and extra == \"colors\""} -six = "*" - -[package.extras] -all = ["IPython", "IPython", "Pygments", "Pygments", "attrs", "cmake", "codecov", "colorama", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert", "ninja", "pybind11", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest-cov", "pytest-cov", "pytest-cov", "pytest-cov", "scikit-build", "six", "typing"] -all-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "cmake (==3.21.2)", "codecov (==2.0.15)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "ninja (==1.10.2)", "pybind11 (==2.7.1)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==2.8.1)", "pytest-cov (==2.8.1)", "pytest-cov (==2.9.0)", "pytest-cov (==3.0.0)", "scikit-build (==0.11.1)", "six (==1.11.0)", "typing (==3.7.4)"] -colors = ["Pygments", "Pygments", "colorama"] -jupyter = ["IPython", "IPython", "attrs", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert"] -optional = ["IPython", "IPython", "Pygments", "Pygments", "attrs", "colorama", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert", "pyflakes", "tomli"] -optional-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "pyflakes (==2.2.0)", "tomli (==0.2.0)"] -runtime-strict = ["six (==1.11.0)"] -tests = ["cmake", "codecov", "ninja", "pybind11", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest-cov", "pytest-cov", "pytest-cov", "pytest-cov", "scikit-build", "typing"] -tests-strict = ["cmake (==3.21.2)", "codecov (==2.0.15)", "ninja (==1.10.2)", "pybind11 (==2.7.1)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==2.8.1)", "pytest-cov (==2.8.1)", "pytest-cov (==2.9.0)", "pytest-cov (==3.0.0)", "scikit-build (==0.11.1)", "typing 
(==3.7.4)"] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] +test = ["covdefaults (>=2.2.2)", "coverage (>=7.1)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23)", "pytest (>=7.2.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)"] [metadata] lock-version = "1.1" python-versions = ">=3.11,<3.12" -content-hash = "218d9e84c83ac2b9953fa5e18ee39879d2573fc749900887851be6d9ec32e63d" +content-hash = "b8148152aab04a6f436f9f815c998963dd371aa299736ec0ffb7aff89324bffc" [metadata.files] -alabaster = [ - {file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"}, - {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"}, -] -alembic = [ - {file = "alembic-1.8.1-py3-none-any.whl", hash = "sha256:0a024d7f2de88d738d7395ff866997314c837be6104e90c5724350313dee4da4"}, - {file = "alembic-1.8.1.tar.gz", hash = "sha256:cd0b5e45b14b706426b833f06369b9a6d5ee03f826ec3238723ce8caaf6e5ffa"}, -] -amqp = [ - {file = "amqp-5.1.1-py3-none-any.whl", hash = "sha256:6f0956d2c23d8fa6e7691934d8c3930eadb44972cbbd1a7ae3a520f735d43359"}, - {file = "amqp-5.1.1.tar.gz", hash = "sha256:2c1b13fecc0893e946c65cbd5f36427861cffa4ea2201d8f6fca22e2a373b5e2"}, -] -aniso8601 = [ - {file = "aniso8601-9.0.1-py2.py3-none-any.whl", hash = "sha256:1d2b7ef82963909e93c4f24ce48d4de9e66009a21bf1c1e1c85bdd0812fe412f"}, - {file = "aniso8601-9.0.1.tar.gz", hash = "sha256:72e3117667eedf66951bb2d93f4296a56b94b078a8a95905a052611fb3f1b973"}, -] -apscheduler = [ - {file = "APScheduler-3.10.0-py3-none-any.whl", hash = "sha256:575299f20073c60a2cc9d4fa5906024cdde33c5c0ce6087c4e3c14be3b50fdd4"}, - {file = "APScheduler-3.10.0.tar.gz", hash = "sha256:a49fc23269218416f0e41890eea7a75ed6b284f10630dcfe866ab659621a3696"}, -] -astroid = [ - {file = "astroid-2.12.12-py3-none-any.whl", hash = "sha256:72702205200b2a638358369d90c222d74ebc376787af8fb2f7f2a86f7b5cc85f"}, - {file = "astroid-2.12.12.tar.gz", hash = "sha256:1c00a14f5a3ed0339d38d2e2e5b74ea2591df5861c0936bb292b84ccf3a78d83"}, -] attrs = [ - {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, - {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, -] -babel = [ - {file = "Babel-2.10.3-py3-none-any.whl", hash = "sha256:ff56f4892c1c4bf0d814575ea23471c230d544203c7748e8c68f0089478d48eb"}, - {file = "Babel-2.10.3.tar.gz", hash = "sha256:7614553711ee97490f732126dc077f8d0ae084ebc6a96e23db1482afabdb2c51"}, + {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, + {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, ] bandit = [ {file = "bandit-1.7.2-py3-none-any.whl", hash = "sha256:e20402cadfd126d85b68ed4c8862959663c8c372dbbb1fca8f8e2c9f55a067ec"}, {file = "bandit-1.7.2.tar.gz", hash = "sha256:6d11adea0214a43813887bfe71a377b5a9955e4c826c8ffd341b494e3ab25260"}, ] -bcrypt = [ - {file = "bcrypt-4.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f"}, - {file = 
"bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0eaa47d4661c326bfc9d08d16debbc4edf78778e6aaba29c1bc7ce67214d4410"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae88eca3024bb34bb3430f964beab71226e761f51b912de5133470b649d82344"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:a522427293d77e1c29e303fc282e2d71864579527a04ddcfda6d4f8396c6c36a"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:fbdaec13c5105f0c4e5c52614d04f0bca5f5af007910daa8b6b12095edaa67b3"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ca3204d00d3cb2dfed07f2d74a25f12fc12f73e606fcaa6975d1f7ae69cacbb2"}, - {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:089098effa1bc35dc055366740a067a2fc76987e8ec75349eb9484061c54f535"}, - {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:e9a51bbfe7e9802b5f3508687758b564069ba937748ad7b9e890086290d2f79e"}, - {file = "bcrypt-4.0.1-cp36-abi3-win32.whl", hash = "sha256:2caffdae059e06ac23fce178d31b4a702f2a3264c20bfb5ff541b338194d8fab"}, - {file = "bcrypt-4.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:8a68f4341daf7522fe8d73874de8906f3a339048ba406be6ddc1b3ccb16fc0d9"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf4fa8b2ca74381bb5442c089350f09a3f17797829d958fad058d6e44d9eb83c"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:67a97e1c405b24f19d08890e7ae0c4f7ce1e56a712a016746c8b2d7732d65d4b"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b3b85202d95dd568efcb35b53936c5e3b3600c7cdcc6115ba461df3a8e89f38d"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbb03eec97496166b704ed663a53680ab57c5084b2fc98ef23291987b525cb7d"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:5ad4d32a28b80c5fa6671ccfb43676e8c1cc232887759d1cd7b6f56ea4355215"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b57adba8a1444faf784394de3436233728a1ecaeb6e07e8c22c8848f179b893c"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:705b2cea8a9ed3d55b4491887ceadb0106acf7c6387699fca771af56b1cdeeda"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:2b3ac11cf45161628f1f3733263e63194f22664bf4d0c0f3ab34099c02134665"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3100851841186c25f127731b9fa11909ab7b1df6fc4b9f8353f4f1fd952fbf71"}, - {file = "bcrypt-4.0.1.tar.gz", hash = "sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd"}, -] -beautifulsoup4 = [ - {file = "beautifulsoup4-4.11.1-py3-none-any.whl", hash = "sha256:58d5c3d29f5a36ffeb94f02f0d786cd53014cf9b3b3951d42e0080d8a9498d30"}, - {file = "beautifulsoup4-4.11.1.tar.gz", hash = "sha256:ad9aa55b65ef2808eb405f46cf74df7fcb7044d5cbc26487f96eb2ef2e436693"}, -] -billiard = [ - {file = "billiard-3.6.4.0-py3-none-any.whl", hash = "sha256:87103ea78fa6ab4d5c751c4909bcff74617d985de7fa8b672cf8618afd5a875b"}, - {file = "billiard-3.6.4.0.tar.gz", hash = 
"sha256:299de5a8da28a783d51b197d496bef4f1595dd023a93a4f59dde1886ae905547"}, -] black = [ - {file = "black-23.1a1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fb7641d442ede92538bc70fa0201f884753a7d0f62f26c722b7b00301b95902"}, - {file = "black-23.1a1-cp310-cp310-win_amd64.whl", hash = "sha256:88288a645402106b8eb9f50d7340ae741e16240bb01c2eed8466549153daa96e"}, - {file = "black-23.1a1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4db1d8027ce7ae53f0ccf02b0be0b8808fefb291d6cb1543420f4165d96d364c"}, - {file = "black-23.1a1-cp311-cp311-win_amd64.whl", hash = "sha256:88ec25a64063945b4591b6378bead544c5d3260de1c93ad96f3ad2d76ddd76fd"}, - {file = "black-23.1a1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dff6f0157e47fbbeada046fca144b6557d3be2fb2602d668881cd179f04a352"}, - {file = "black-23.1a1-cp37-cp37m-win_amd64.whl", hash = "sha256:ca658b69260a18bf7aa0b0a6562dbbd304a737487d1318998aaca5a75901fd2c"}, - {file = "black-23.1a1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85dede655442f5e246e7abd667fe07e14916897ba52f3640b5489bf11f7dbf67"}, - {file = "black-23.1a1-cp38-cp38-win_amd64.whl", hash = "sha256:ddbf9da228726d46f45c29024263e160d41030a415097254817d65127012d1a2"}, - {file = "black-23.1a1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63330069d8ec909cf4e2c4d43a7f00aeb03335430ef9fec6cd2328e6ebde8a77"}, - {file = "black-23.1a1-cp39-cp39-win_amd64.whl", hash = "sha256:793c9176beb2adf295f6b863d9a4dc953fe2ac359ca3da108d71d14cb2c09e52"}, - {file = "black-23.1a1-py3-none-any.whl", hash = "sha256:e88e4b633d64b9e7adc4a6b922f52bb204af9f90d7b1e3317e6490f2b598b1ea"}, - {file = "black-23.1a1.tar.gz", hash = "sha256:0b945a5a1e5a5321f884de0061d5a8585d947c9b608e37b6d26ceee4dfdf4b62"}, -] -blinker = [ - {file = "blinker-1.5-py2.py3-none-any.whl", hash = "sha256:1eb563df6fdbc39eeddc177d953203f99f097e9bf0e2b8f9f3cf18b6ca425e36"}, - {file = "blinker-1.5.tar.gz", hash = "sha256:923e5e2f69c155f2cc42dafbbd70e16e3fde24d2d4aa2ab72fbe386238892462"}, -] -celery = [ - {file = "celery-5.2.7-py3-none-any.whl", hash = "sha256:138420c020cd58d6707e6257b6beda91fd39af7afde5d36c6334d175302c0e14"}, - {file = "celery-5.2.7.tar.gz", hash = "sha256:fafbd82934d30f8a004f81e8f7a062e31413a23d444be8ee3326553915958c6d"}, -] -certifi = [ - {file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"}, - {file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"}, + {file = "black-23.1.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:b6a92a41ee34b883b359998f0c8e6eb8e99803aa8bf3123bf2b2e6fec505a221"}, + {file = "black-23.1.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:57c18c5165c1dbe291d5306e53fb3988122890e57bd9b3dcb75f967f13411a26"}, + {file = "black-23.1.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:9880d7d419bb7e709b37e28deb5e68a49227713b623c72b2b931028ea65f619b"}, + {file = "black-23.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6663f91b6feca5d06f2ccd49a10f254f9298cc1f7f49c46e498a0771b507104"}, + {file = "black-23.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9afd3f493666a0cd8f8df9a0200c6359ac53940cbde049dcb1a7eb6ee2dd7074"}, + {file = "black-23.1.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:bfffba28dc52a58f04492181392ee380e95262af14ee01d4bc7bb1b1c6ca8d27"}, + {file = 
"black-23.1.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c1c476bc7b7d021321e7d93dc2cbd78ce103b84d5a4cf97ed535fbc0d6660648"}, + {file = "black-23.1.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:382998821f58e5c8238d3166c492139573325287820963d2f7de4d518bd76958"}, + {file = "black-23.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bf649fda611c8550ca9d7592b69f0637218c2369b7744694c5e4902873b2f3a"}, + {file = "black-23.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:121ca7f10b4a01fd99951234abdbd97728e1240be89fde18480ffac16503d481"}, + {file = "black-23.1.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:a8471939da5e824b891b25751955be52ee7f8a30a916d570a5ba8e0f2eb2ecad"}, + {file = "black-23.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8178318cb74f98bc571eef19068f6ab5613b3e59d4f47771582f04e175570ed8"}, + {file = "black-23.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a436e7881d33acaf2536c46a454bb964a50eff59b21b51c6ccf5a40601fbef24"}, + {file = "black-23.1.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:a59db0a2094d2259c554676403fa2fac3473ccf1354c1c63eccf7ae65aac8ab6"}, + {file = "black-23.1.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:0052dba51dec07ed029ed61b18183942043e00008ec65d5028814afaab9a22fd"}, + {file = "black-23.1.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:49f7b39e30f326a34b5c9a4213213a6b221d7ae9d58ec70df1c4a307cf2a1580"}, + {file = "black-23.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:162e37d49e93bd6eb6f1afc3e17a3d23a823042530c37c3c42eeeaf026f38468"}, + {file = "black-23.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b70eb40a78dfac24842458476135f9b99ab952dd3f2dab738c1881a9b38b753"}, + {file = "black-23.1.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:a29650759a6a0944e7cca036674655c2f0f63806ddecc45ed40b7b8aa314b651"}, + {file = "black-23.1.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:bb460c8561c8c1bec7824ecbc3ce085eb50005883a6203dcfb0122e95797ee06"}, + {file = "black-23.1.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:c91dfc2c2a4e50df0026f88d2215e166616e0c80e86004d0003ece0488db2739"}, + {file = "black-23.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a951cc83ab535d248c89f300eccbd625e80ab880fbcfb5ac8afb5f01a258ac9"}, + {file = "black-23.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:0680d4380db3719ebcfb2613f34e86c8e6d15ffeabcf8ec59355c5e7b85bb555"}, + {file = "black-23.1.0-py3-none-any.whl", hash = "sha256:7a0f701d314cfa0896b9001df70a530eb2472babb76086344e688829efd97d32"}, + {file = "black-23.1.0.tar.gz", hash = "sha256:b0bd97bea8903f5a2ba7219257a44e3f1f9d00073d6cc1add68f0beec69692ac"}, ] cfgv = [ {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"}, {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, ] -charset-normalizer = [ - {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, - {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, -] -classify-imports = [ - {file = "classify_imports-4.2.0-py2.py3-none-any.whl", hash = "sha256:dbbc264b70a470ed8c6c95976a11dfb8b7f63df44ed1af87328bbed2663f5161"}, - {file = "classify_imports-4.2.0.tar.gz", hash = 
"sha256:7abfb7ea92149b29d046bd34573d247ba6e68cc28100c801eba4af17964fc40e"}, -] click = [ {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, ] -click-didyoumean = [ - {file = "click-didyoumean-0.3.0.tar.gz", hash = "sha256:f184f0d851d96b6d29297354ed981b7dd71df7ff500d82fa6d11f0856bee8035"}, - {file = "click_didyoumean-0.3.0-py3-none-any.whl", hash = "sha256:a0713dc7a1de3f06bc0df5a9567ad19ead2d3d5689b434768a6145bff77c0667"}, -] -click-plugins = [ - {file = "click-plugins-1.1.1.tar.gz", hash = "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b"}, - {file = "click_plugins-1.1.1-py2.py3-none-any.whl", hash = "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8"}, -] -click-repl = [ - {file = "click-repl-0.2.0.tar.gz", hash = "sha256:cd12f68d745bf6151210790540b4cb064c7b13e571bc64b6957d98d120dacfd8"}, - {file = "click_repl-0.2.0-py3-none-any.whl", hash = "sha256:94b3fbbc9406a236f176e0506524b2937e4b23b6f4c0c0b2a0a83f8a64e9194b"}, -] -clickclick = [ - {file = "clickclick-20.10.2-py2.py3-none-any.whl", hash = "sha256:c8f33e6d9ec83f68416dd2136a7950125bd256ec39ccc9a85c6e280a16be2bb5"}, - {file = "clickclick-20.10.2.tar.gz", hash = "sha256:4efb13e62353e34c5eef7ed6582c4920b418d7dedc86d819e22ee089ba01802c"}, -] colorama = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] -configparser = [ - {file = "configparser-5.3.0-py3-none-any.whl", hash = "sha256:b065779fd93c6bf4cee42202fa4351b4bb842e96a3fb469440e484517a49b9fa"}, - {file = "configparser-5.3.0.tar.gz", hash = "sha256:8be267824b541c09b08db124917f48ab525a6c3e837011f3130781a224c57090"}, -] -connexion = [ - {file = "connexion-2.14.1-py2.py3-none-any.whl", hash = "sha256:f343717241b4c4802a694c38fee66fb1693c897fe4ea5a957fa9b3b07caf6394"}, - {file = "connexion-2.14.1.tar.gz", hash = "sha256:99aa5781e70a7b94f8ffae8cf89f309d49cdb811bbd65a8e2f2546f3b19a01e6"}, -] -coverage = [ - {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, - {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, - {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, - {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, - {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, - {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, - {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, - {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"}, - {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"}, - {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"}, - {file = 
"coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"}, - {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"}, - {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"}, - {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"}, - {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"}, - {file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"}, - {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"}, - {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, - {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, -] -darglint = [ - {file = "darglint-1.8.1-py3-none-any.whl", hash = "sha256:5ae11c259c17b0701618a20c3da343a3eb98b3bc4b5a83d31cdd94f5ebdced8d"}, - {file = "darglint-1.8.1.tar.gz", hash = "sha256:080d5106df149b199822e7ee7deb9c012b49891538f14a11be681044f0bb20da"}, -] distlib = [ {file = "distlib-0.3.6-py2.py3-none-any.whl", hash = "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"}, {file = "distlib-0.3.6.tar.gz", hash = 
"sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46"}, @@ -2306,17 +475,9 @@ docutils = [ {file = "docutils-0.19-py3-none-any.whl", hash = "sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc"}, {file = "docutils-0.19.tar.gz", hash = "sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6"}, ] -dparse = [ - {file = "dparse-0.6.2-py3-none-any.whl", hash = "sha256:8097076f1dd26c377f30d4745e6ec18fef42f3bf493933b842ac5bafad8c345f"}, - {file = "dparse-0.6.2.tar.gz", hash = "sha256:d45255bda21f998bc7ddf2afd5e62505ba6134756ba2d42a84c56b0826614dfe"}, -] -ecdsa = [ - {file = "ecdsa-0.18.0-py2.py3-none-any.whl", hash = "sha256:80600258e7ed2f16b9aa1d7c295bd70194109ad5a30fdee0eaeefef1d4c559dd"}, - {file = "ecdsa-0.18.0.tar.gz", hash = "sha256:190348041559e21b22a1d65cee485282ca11a6f81d503fddb84d5017e9ed1e49"}, -] filelock = [ - {file = "filelock-3.8.0-py3-none-any.whl", hash = "sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4"}, - {file = "filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"}, + {file = "filelock-3.10.7-py3-none-any.whl", hash = "sha256:bde48477b15fde2c7e5a0713cbe72721cb5a5ad32ee0b8f419907960b9d75536"}, + {file = "filelock-3.10.7.tar.gz", hash = "sha256:892be14aa8efc01673b5ed6589dbccb95f9a8596f0507e232626155495c18105"}, ] flake8 = [ {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, @@ -2326,12 +487,12 @@ flake8-bandit = [ {file = "flake8_bandit-2.1.2.tar.gz", hash = "sha256:687fc8da2e4a239b206af2e54a90093572a60d0954f3054e23690739b0b0de3b"}, ] flake8-bugbear = [ - {file = "flake8-bugbear-22.10.27.tar.gz", hash = "sha256:a6708608965c9e0de5fff13904fed82e0ba21ac929fe4896459226a797e11cd5"}, - {file = "flake8_bugbear-22.10.27-py3-none-any.whl", hash = "sha256:6ad0ab754507319060695e2f2be80e6d8977cfcea082293089a9226276bd825d"}, + {file = "flake8-bugbear-22.12.6.tar.gz", hash = "sha256:4cdb2c06e229971104443ae293e75e64c6107798229202fbe4f4091427a30ac0"}, + {file = "flake8_bugbear-22.12.6-py3-none-any.whl", hash = "sha256:b69a510634f8a9c298dfda2b18a8036455e6b19ecac4fe582e4d7a0abfa50a30"}, ] flake8-docstrings = [ - {file = "flake8-docstrings-1.6.0.tar.gz", hash = "sha256:9fe7c6a306064af8e62a055c2f61e9eb1da55f84bb39caef2b84ce53708ac34b"}, - {file = "flake8_docstrings-1.6.0-py2.py3-none-any.whl", hash = "sha256:99cac583d6c7e32dd28bbfbef120a7c0d1b6dde4adb5a9fd441c4227a6534bde"}, + {file = "flake8_docstrings-1.7.0-py2.py3-none-any.whl", hash = "sha256:51f2344026da083fc084166a9353f5082b01f72901df422f74b4d953ae88ac75"}, + {file = "flake8_docstrings-1.7.0.tar.gz", hash = "sha256:4c8cc748dc16e6869728699e5d0d685da9a10b0ea718e090b1ba088e67a941af"}, ] flake8-polyfill = [ {file = "flake8-polyfill-1.0.2.tar.gz", hash = "sha256:e44b087597f6da52ec6393a709e7108b2905317d0c0b744cdca6208e670d8eda"}, @@ -2341,539 +502,65 @@ flake8-rst-docstrings = [ {file = "flake8-rst-docstrings-0.2.7.tar.gz", hash = "sha256:2740067ab9237559dd45a3434d8c987792c7b259ca563621a3b95efe201f5382"}, {file = "flake8_rst_docstrings-0.2.7-py3-none-any.whl", hash = "sha256:5d56075dce360bcc9c6775bfe7cb431aa395de600ca7e8d40580a28d50b2a803"}, ] -flask = [ - {file = "Flask-2.2.2-py3-none-any.whl", hash = "sha256:b9c46cc36662a7949f34b52d8ec7bb59c0d74ba08ba6cb9ce9adc1d8676d9526"}, - {file = "Flask-2.2.2.tar.gz", hash = "sha256:642c450d19c4ad482f96729bd2a8f6d32554aa1e231f4f6b4e7e5264b16cca2b"}, -] -flask-admin = [ - {file = 
"Flask-Admin-1.6.0.tar.gz", hash = "sha256:424ffc79b7b0dfff051555686ea12e86e48dffacac14beaa319fb4502ac40988"}, -] -flask-bcrypt = [ - {file = "Flask-Bcrypt-1.0.1.tar.gz", hash = "sha256:f07b66b811417ea64eb188ae6455b0b708a793d966e1a80ceec4a23bc42a4369"}, - {file = "Flask_Bcrypt-1.0.1-py3-none-any.whl", hash = "sha256:062fd991dc9118d05ac0583675507b9fe4670e44416c97e0e6819d03d01f808a"}, -] -flask-bpmn = [] -flask-cors = [ - {file = "Flask-Cors-3.0.10.tar.gz", hash = "sha256:b60839393f3b84a0f3746f6cdca56c1ad7426aa738b70d6c61375857823181de"}, - {file = "Flask_Cors-3.0.10-py2.py3-none-any.whl", hash = "sha256:74efc975af1194fc7891ff5cd85b0f7478be4f7f59fe158102e91abb72bb4438"}, -] -flask-mail = [ - {file = "Flask-Mail-0.9.1.tar.gz", hash = "sha256:22e5eb9a940bf407bcf30410ecc3708f3c56cc44b29c34e1726fe85006935f41"}, -] -flask-marshmallow = [ - {file = "flask-marshmallow-0.14.0.tar.gz", hash = "sha256:bd01a6372cbe50e36f205cfff0fc5dab0b7b662c4c8b2c4fc06a3151b2950950"}, - {file = "flask_marshmallow-0.14.0-py2.py3-none-any.whl", hash = "sha256:2adcd782b5a4a6c5ae3c96701f320d8ca6997995a52b2661093c56cc3ed24754"}, -] -flask-migrate = [ - {file = "Flask-Migrate-3.1.0.tar.gz", hash = "sha256:57d6060839e3a7f150eaab6fe4e726d9e3e7cffe2150fb223d73f92421c6d1d9"}, - {file = "Flask_Migrate-3.1.0-py3-none-any.whl", hash = "sha256:a6498706241aba6be7a251078de9cf166d74307bca41a4ca3e403c9d39e2f897"}, -] -flask-restful = [ - {file = "Flask-RESTful-0.3.9.tar.gz", hash = "sha256:ccec650b835d48192138c85329ae03735e6ced58e9b2d9c2146d6c84c06fa53e"}, - {file = "Flask_RESTful-0.3.9-py2.py3-none-any.whl", hash = "sha256:4970c49b6488e46c520b325f54833374dc2b98e211f1b272bd4b0c516232afe2"}, -] -flask-sqlalchemy = [ - {file = "Flask-SQLAlchemy-3.0.2.tar.gz", hash = "sha256:16199f5b3ddfb69e0df2f52ae4c76aedbfec823462349dabb21a1b2e0a2b65e9"}, - {file = "Flask_SQLAlchemy-3.0.2-py3-none-any.whl", hash = "sha256:7d0cd9cf73e64a996bb881a1ebd01633fc5a6d11c36ea27f7b5e251dc45476e7"}, -] -furo = [ - {file = "furo-2022.9.29-py3-none-any.whl", hash = "sha256:559ee17999c0f52728481dcf6b1b0cf8c9743e68c5e3a18cb45a7992747869a9"}, - {file = "furo-2022.9.29.tar.gz", hash = "sha256:d4238145629c623609c2deb5384f8d036e2a1ee2a101d64b67b4348112470dbd"}, -] gitdb = [ - {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, - {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, + {file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"}, + {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"}, ] gitpython = [ - {file = "GitPython-3.1.29-py3-none-any.whl", hash = "sha256:41eea0deec2deea139b459ac03656f0dd28fc4a3387240ec1d3c259a2c47850f"}, - {file = "GitPython-3.1.29.tar.gz", hash = "sha256:cc36bfc4a3f913e66805a28e84703e419d9c264c1077e537b54f0e1af85dbefd"}, -] -greenlet = [ - {file = "greenlet-2.0.1-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:9ed358312e63bf683b9ef22c8e442ef6c5c02973f0c2a939ec1d7b50c974015c"}, - {file = "greenlet-2.0.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4f09b0010e55bec3239278f642a8a506b91034f03a4fb28289a7d448a67f1515"}, - {file = "greenlet-2.0.1-cp27-cp27m-win32.whl", hash = "sha256:1407fe45246632d0ffb7a3f4a520ba4e6051fc2cbd61ba1f806900c27f47706a"}, - {file = "greenlet-2.0.1-cp27-cp27m-win_amd64.whl", hash = 
"sha256:3001d00eba6bbf084ae60ec7f4bb8ed375748f53aeaefaf2a37d9f0370558524"}, - {file = "greenlet-2.0.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d566b82e92ff2e09dd6342df7e0eb4ff6275a3f08db284888dcd98134dbd4243"}, - {file = "greenlet-2.0.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:0722c9be0797f544a3ed212569ca3fe3d9d1a1b13942d10dd6f0e8601e484d26"}, - {file = "greenlet-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d37990425b4687ade27810e3b1a1c37825d242ebc275066cfee8cb6b8829ccd"}, - {file = "greenlet-2.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be35822f35f99dcc48152c9839d0171a06186f2d71ef76dc57fa556cc9bf6b45"}, - {file = "greenlet-2.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c140e7eb5ce47249668056edf3b7e9900c6a2e22fb0eaf0513f18a1b2c14e1da"}, - {file = "greenlet-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d21681f09e297a5adaa73060737e3aa1279a13ecdcfcc6ef66c292cb25125b2d"}, - {file = "greenlet-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fb412b7db83fe56847df9c47b6fe3f13911b06339c2aa02dcc09dce8bbf582cd"}, - {file = "greenlet-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:c6a08799e9e88052221adca55741bf106ec7ea0710bca635c208b751f0d5b617"}, - {file = "greenlet-2.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9e112e03d37987d7b90c1e98ba5e1b59e1645226d78d73282f45b326f7bddcb9"}, - {file = "greenlet-2.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56961cfca7da2fdd178f95ca407fa330c64f33289e1804b592a77d5593d9bd94"}, - {file = "greenlet-2.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:13ba6e8e326e2116c954074c994da14954982ba2795aebb881c07ac5d093a58a"}, - {file = "greenlet-2.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bf633a50cc93ed17e494015897361010fc08700d92676c87931d3ea464123ce"}, - {file = "greenlet-2.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9f2c221eecb7ead00b8e3ddb913c67f75cba078fd1d326053225a3f59d850d72"}, - {file = "greenlet-2.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:13ebf93c343dd8bd010cd98e617cb4c1c1f352a0cf2524c82d3814154116aa82"}, - {file = "greenlet-2.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:6f61d71bbc9b4a3de768371b210d906726535d6ca43506737682caa754b956cd"}, - {file = "greenlet-2.0.1-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:2d0bac0385d2b43a7bd1d651621a4e0f1380abc63d6fb1012213a401cbd5bf8f"}, - {file = "greenlet-2.0.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:f6327b6907b4cb72f650a5b7b1be23a2aab395017aa6f1adb13069d66360eb3f"}, - {file = "greenlet-2.0.1-cp35-cp35m-win32.whl", hash = "sha256:81b0ea3715bf6a848d6f7149d25bf018fd24554a4be01fcbbe3fdc78e890b955"}, - {file = "greenlet-2.0.1-cp35-cp35m-win_amd64.whl", hash = "sha256:38255a3f1e8942573b067510f9611fc9e38196077b0c8eb7a8c795e105f9ce77"}, - {file = "greenlet-2.0.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:04957dc96669be041e0c260964cfef4c77287f07c40452e61abe19d647505581"}, - {file = "greenlet-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:4aeaebcd91d9fee9aa768c1b39cb12214b30bf36d2b7370505a9f2165fedd8d9"}, - {file = "greenlet-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:974a39bdb8c90a85982cdb78a103a32e0b1be986d411303064b28a80611f6e51"}, - {file = "greenlet-2.0.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:8dca09dedf1bd8684767bc736cc20c97c29bc0c04c413e3276e0962cd7aeb148"}, - {file = "greenlet-2.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4c0757db9bd08470ff8277791795e70d0bf035a011a528ee9a5ce9454b6cba2"}, - {file = "greenlet-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:5067920de254f1a2dee8d3d9d7e4e03718e8fd2d2d9db962c8c9fa781ae82a39"}, - {file = "greenlet-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:5a8e05057fab2a365c81abc696cb753da7549d20266e8511eb6c9d9f72fe3e92"}, - {file = "greenlet-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:3d75b8d013086b08e801fbbb896f7d5c9e6ccd44f13a9241d2bf7c0df9eda928"}, - {file = "greenlet-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:097e3dae69321e9100202fc62977f687454cd0ea147d0fd5a766e57450c569fd"}, - {file = "greenlet-2.0.1-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:cb242fc2cda5a307a7698c93173d3627a2a90d00507bccf5bc228851e8304963"}, - {file = "greenlet-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:72b00a8e7c25dcea5946692a2485b1a0c0661ed93ecfedfa9b6687bd89a24ef5"}, - {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5b0ff9878333823226d270417f24f4d06f235cb3e54d1103b71ea537a6a86ce"}, - {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be9e0fb2ada7e5124f5282d6381903183ecc73ea019568d6d63d33f25b2a9000"}, - {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b493db84d124805865adc587532ebad30efa68f79ad68f11b336e0a51ec86c2"}, - {file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0459d94f73265744fee4c2d5ec44c6f34aa8a31017e6e9de770f7bcf29710be9"}, - {file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a20d33124935d27b80e6fdacbd34205732660e0a1d35d8b10b3328179a2b51a1"}, - {file = "greenlet-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:ea688d11707d30e212e0110a1aac7f7f3f542a259235d396f88be68b649e47d1"}, - {file = "greenlet-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:afe07421c969e259e9403c3bb658968702bc3b78ec0b6fde3ae1e73440529c23"}, - {file = "greenlet-2.0.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:cd4ccc364cf75d1422e66e247e52a93da6a9b73cefa8cad696f3cbbb75af179d"}, - {file = "greenlet-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:4c8b1c43e75c42a6cafcc71defa9e01ead39ae80bd733a2608b297412beede68"}, - {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:659f167f419a4609bc0516fb18ea69ed39dbb25594934bd2dd4d0401660e8a1e"}, - {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:356e4519d4dfa766d50ecc498544b44c0249b6de66426041d7f8b751de4d6b48"}, - {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:811e1d37d60b47cb8126e0a929b58c046251f28117cb16fcd371eed61f66b764"}, - {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d38ffd0e81ba8ef347d2be0772e899c289b59ff150ebbbbe05dc61b1246eb4e0"}, - {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0109af1138afbfb8ae647e31a2b1ab030f58b21dd8528c27beaeb0093b7938a9"}, - {file = "greenlet-2.0.1-cp38-cp38-win32.whl", hash = "sha256:88c8d517e78acdf7df8a2134a3c4b964415b575d2840a2746ddb1cc6175f8608"}, - {file = "greenlet-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:d6ee1aa7ab36475035eb48c01efae87d37936a8173fc4d7b10bb02c2d75dd8f6"}, - {file = 
"greenlet-2.0.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:b1992ba9d4780d9af9726bbcef6a1db12d9ab1ccc35e5773685a24b7fb2758eb"}, - {file = "greenlet-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:b5e83e4de81dcc9425598d9469a624826a0b1211380ac444c7c791d4a2137c19"}, - {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:505138d4fa69462447a562a7c2ef723c6025ba12ac04478bc1ce2fcc279a2db5"}, - {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cce1e90dd302f45716a7715517c6aa0468af0bf38e814ad4eab58e88fc09f7f7"}, - {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e9744c657d896c7b580455e739899e492a4a452e2dd4d2b3e459f6b244a638d"}, - {file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:662e8f7cad915ba75d8017b3e601afc01ef20deeeabf281bd00369de196d7726"}, - {file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:41b825d65f31e394b523c84db84f9383a2f7eefc13d987f308f4663794d2687e"}, - {file = "greenlet-2.0.1-cp39-cp39-win32.whl", hash = "sha256:db38f80540083ea33bdab614a9d28bcec4b54daa5aff1668d7827a9fc769ae0a"}, - {file = "greenlet-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b23d2a46d53210b498e5b701a1913697671988f4bf8e10f935433f6e7c332fb6"}, - {file = "greenlet-2.0.1.tar.gz", hash = "sha256:42e602564460da0e8ee67cb6d7236363ee5e131aa15943b6670e44e5c2ed0f67"}, -] -gunicorn = [ - {file = "gunicorn-20.1.0-py3-none-any.whl", hash = "sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e"}, - {file = "gunicorn-20.1.0.tar.gz", hash = "sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8"}, + {file = "GitPython-3.1.31-py3-none-any.whl", hash = "sha256:f04893614f6aa713a60cbbe1e6a97403ef633103cdd0ef5eb6efe0deb98dbe8d"}, + {file = "GitPython-3.1.31.tar.gz", hash = "sha256:8ce3bcf69adfdf7c7d503e78fd3b1c492af782d58893b650adb2ac8912ddd573"}, ] identify = [ - {file = "identify-2.5.7-py2.py3-none-any.whl", hash = "sha256:7a67b2a6208d390fd86fd04fb3def94a3a8b7f0bcbd1d1fcd6736f4defe26390"}, - {file = "identify-2.5.7.tar.gz", hash = "sha256:5b8fd1e843a6d4bf10685dd31f4520a7f1c7d0e14e9bc5d34b1d6f111cabc011"}, -] -idna = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, -] -imagesize = [ - {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, - {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, -] -inflection = [ - {file = "inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2"}, - {file = "inflection-0.5.1.tar.gz", hash = "sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417"}, -] -iniconfig = [ - {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, - {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, -] -itsdangerous = [ - {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"}, - {file = "itsdangerous-2.1.2.tar.gz", hash = 
"sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"}, -] -jinja2 = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, -] -jsonschema = [ - {file = "jsonschema-4.16.0-py3-none-any.whl", hash = "sha256:9e74b8f9738d6a946d70705dc692b74b5429cd0960d58e79ffecfc43b2221eb9"}, - {file = "jsonschema-4.16.0.tar.gz", hash = "sha256:165059f076eff6971bae5b742fc029a7b4ef3f9bcf04c14e4776a7605de14b23"}, -] -kombu = [ - {file = "kombu-5.2.4-py3-none-any.whl", hash = "sha256:8b213b24293d3417bcf0d2f5537b7f756079e3ea232a8386dcc89a59fd2361a4"}, - {file = "kombu-5.2.4.tar.gz", hash = "sha256:37cee3ee725f94ea8bb173eaab7c1760203ea53bbebae226328600f9d2799610"}, -] -lazy-object-proxy = [ - {file = "lazy-object-proxy-1.8.0.tar.gz", hash = "sha256:c219a00245af0f6fa4e95901ed28044544f50152840c5b6a3e7b2568db34d156"}, - {file = "lazy_object_proxy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4fd031589121ad46e293629b39604031d354043bb5cdf83da4e93c2d7f3389fe"}, - {file = "lazy_object_proxy-1.8.0-cp310-cp310-win32.whl", hash = "sha256:b70d6e7a332eb0217e7872a73926ad4fdc14f846e85ad6749ad111084e76df25"}, - {file = "lazy_object_proxy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:eb329f8d8145379bf5dbe722182410fe8863d186e51bf034d2075eb8d85ee25b"}, - {file = "lazy_object_proxy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4e2d9f764f1befd8bdc97673261b8bb888764dfdbd7a4d8f55e4fbcabb8c3fb7"}, - {file = "lazy_object_proxy-1.8.0-cp311-cp311-win32.whl", hash = "sha256:e20bfa6db17a39c706d24f82df8352488d2943a3b7ce7d4c22579cb89ca8896e"}, - {file = "lazy_object_proxy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:14010b49a2f56ec4943b6cf925f597b534ee2fe1f0738c84b3bce0c1a11ff10d"}, - {file = "lazy_object_proxy-1.8.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6850e4aeca6d0df35bb06e05c8b934ff7c533734eb51d0ceb2d63696f1e6030c"}, - {file = "lazy_object_proxy-1.8.0-cp37-cp37m-win32.whl", hash = "sha256:5b51d6f3bfeb289dfd4e95de2ecd464cd51982fe6f00e2be1d0bf94864d58acd"}, - {file = "lazy_object_proxy-1.8.0-cp37-cp37m-win_amd64.whl", hash = "sha256:6f593f26c470a379cf7f5bc6db6b5f1722353e7bf937b8d0d0b3fba911998858"}, - {file = "lazy_object_proxy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c1c7c0433154bb7c54185714c6929acc0ba04ee1b167314a779b9025517eada"}, - {file = "lazy_object_proxy-1.8.0-cp38-cp38-win32.whl", hash = "sha256:d176f392dbbdaacccf15919c77f526edf11a34aece58b55ab58539807b85436f"}, - {file = "lazy_object_proxy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:afcaa24e48bb23b3be31e329deb3f1858f1f1df86aea3d70cb5c8578bfe5261c"}, - {file = "lazy_object_proxy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:71d9ae8a82203511a6f60ca5a1b9f8ad201cac0fc75038b2dc5fa519589c9288"}, - {file = "lazy_object_proxy-1.8.0-cp39-cp39-win32.whl", hash = "sha256:8f6ce2118a90efa7f62dd38c7dbfffd42f468b180287b748626293bf12ed468f"}, - {file = "lazy_object_proxy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:eac3a9a5ef13b332c059772fd40b4b1c3d45a3a2b05e33a361dee48e54a4dad0"}, - {file = "lazy_object_proxy-1.8.0-pp37-pypy37_pp73-any.whl", hash = "sha256:ae032743794fba4d171b5b67310d69176287b5bf82a21f588282406a79498891"}, - {file = "lazy_object_proxy-1.8.0-pp38-pypy38_pp73-any.whl", hash = "sha256:7e1561626c49cb394268edd00501b289053a652ed762c58e1081224c8d881cec"}, - {file = 
"lazy_object_proxy-1.8.0-pp39-pypy39_pp73-any.whl", hash = "sha256:ce58b2b3734c73e68f0e30e4e725264d4d6be95818ec0a0be4bb6bf9a7e79aa8"}, -] -livereload = [ - {file = "livereload-2.6.3-py2.py3-none-any.whl", hash = "sha256:ad4ac6f53b2d62bb6ce1a5e6e96f1f00976a32348afedcb4b6d68df2a1d346e4"}, - {file = "livereload-2.6.3.tar.gz", hash = "sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869"}, -] -lxml = [ - {file = "lxml-4.9.1-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:98cafc618614d72b02185ac583c6f7796202062c41d2eeecdf07820bad3295ed"}, - {file = "lxml-4.9.1-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c62e8dd9754b7debda0c5ba59d34509c4688f853588d75b53c3791983faa96fc"}, - {file = "lxml-4.9.1-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:21fb3d24ab430fc538a96e9fbb9b150029914805d551deeac7d7822f64631dfc"}, - {file = "lxml-4.9.1-cp27-cp27m-win32.whl", hash = "sha256:86e92728ef3fc842c50a5cb1d5ba2bc66db7da08a7af53fb3da79e202d1b2cd3"}, - {file = "lxml-4.9.1-cp27-cp27m-win_amd64.whl", hash = "sha256:4cfbe42c686f33944e12f45a27d25a492cc0e43e1dc1da5d6a87cbcaf2e95627"}, - {file = "lxml-4.9.1-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dad7b164905d3e534883281c050180afcf1e230c3d4a54e8038aa5cfcf312b84"}, - {file = "lxml-4.9.1-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a614e4afed58c14254e67862456d212c4dcceebab2eaa44d627c2ca04bf86837"}, - {file = "lxml-4.9.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f9ced82717c7ec65a67667bb05865ffe38af0e835cdd78728f1209c8fffe0cad"}, - {file = "lxml-4.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:d9fc0bf3ff86c17348dfc5d322f627d78273eba545db865c3cd14b3f19e57fa5"}, - {file = "lxml-4.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e5f66bdf0976ec667fc4594d2812a00b07ed14d1b44259d19a41ae3fff99f2b8"}, - {file = "lxml-4.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:fe17d10b97fdf58155f858606bddb4e037b805a60ae023c009f760d8361a4eb8"}, - {file = "lxml-4.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8caf4d16b31961e964c62194ea3e26a0e9561cdf72eecb1781458b67ec83423d"}, - {file = "lxml-4.9.1-cp310-cp310-win32.whl", hash = "sha256:4780677767dd52b99f0af1f123bc2c22873d30b474aa0e2fc3fe5e02217687c7"}, - {file = "lxml-4.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:b122a188cd292c4d2fcd78d04f863b789ef43aa129b233d7c9004de08693728b"}, - {file = "lxml-4.9.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:be9eb06489bc975c38706902cbc6888f39e946b81383abc2838d186f0e8b6a9d"}, - {file = "lxml-4.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:f1be258c4d3dc609e654a1dc59d37b17d7fef05df912c01fc2e15eb43a9735f3"}, - {file = "lxml-4.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:927a9dd016d6033bc12e0bf5dee1dde140235fc8d0d51099353c76081c03dc29"}, - {file = "lxml-4.9.1-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9232b09f5efee6a495a99ae6824881940d6447debe272ea400c02e3b68aad85d"}, - {file = "lxml-4.9.1-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:04da965dfebb5dac2619cb90fcf93efdb35b3c6994fea58a157a834f2f94b318"}, - {file = "lxml-4.9.1-cp35-cp35m-win32.whl", hash = "sha256:4d5bae0a37af799207140652a700f21a85946f107a199bcb06720b13a4f1f0b7"}, - {file = 
"lxml-4.9.1-cp35-cp35m-win_amd64.whl", hash = "sha256:4878e667ebabe9b65e785ac8da4d48886fe81193a84bbe49f12acff8f7a383a4"}, - {file = "lxml-4.9.1-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:1355755b62c28950f9ce123c7a41460ed9743c699905cbe664a5bcc5c9c7c7fb"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:bcaa1c495ce623966d9fc8a187da80082334236a2a1c7e141763ffaf7a405067"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6eafc048ea3f1b3c136c71a86db393be36b5b3d9c87b1c25204e7d397cee9536"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:13c90064b224e10c14dcdf8086688d3f0e612db53766e7478d7754703295c7c8"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206a51077773c6c5d2ce1991327cda719063a47adc02bd703c56a662cdb6c58b"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e8f0c9d65da595cfe91713bc1222af9ecabd37971762cb830dea2fc3b3bb2acf"}, - {file = "lxml-4.9.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8f0a4d179c9a941eb80c3a63cdb495e539e064f8054230844dcf2fcb812b71d3"}, - {file = "lxml-4.9.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:830c88747dce8a3e7525defa68afd742b4580df6aa2fdd6f0855481e3994d391"}, - {file = "lxml-4.9.1-cp36-cp36m-win32.whl", hash = "sha256:1e1cf47774373777936c5aabad489fef7b1c087dcd1f426b621fda9dcc12994e"}, - {file = "lxml-4.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:5974895115737a74a00b321e339b9c3f45c20275d226398ae79ac008d908bff7"}, - {file = "lxml-4.9.1-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:1423631e3d51008871299525b541413c9b6c6423593e89f9c4cfbe8460afc0a2"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:2aaf6a0a6465d39b5ca69688fce82d20088c1838534982996ec46633dc7ad6cc"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:9f36de4cd0c262dd9927886cc2305aa3f2210db437aa4fed3fb4940b8bf4592c"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae06c1e4bc60ee076292e582a7512f304abdf6c70db59b56745cca1684f875a4"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:57e4d637258703d14171b54203fd6822fda218c6c2658a7d30816b10995f29f3"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6d279033bf614953c3fc4a0aa9ac33a21e8044ca72d4fa8b9273fe75359d5cca"}, - {file = "lxml-4.9.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a60f90bba4c37962cbf210f0188ecca87daafdf60271f4c6948606e4dabf8785"}, - {file = "lxml-4.9.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6ca2264f341dd81e41f3fffecec6e446aa2121e0b8d026fb5130e02de1402785"}, - {file = "lxml-4.9.1-cp37-cp37m-win32.whl", hash = "sha256:27e590352c76156f50f538dbcebd1925317a0f70540f7dc8c97d2931c595783a"}, - {file = "lxml-4.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:eea5d6443b093e1545ad0210e6cf27f920482bfcf5c77cdc8596aec73523bb7e"}, - {file = "lxml-4.9.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f05251bbc2145349b8d0b77c0d4e5f3b228418807b1ee27cefb11f69ed3d233b"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:487c8e61d7acc50b8be82bda8c8d21d20e133c3cbf41bd8ad7eb1aaeb3f07c97"}, - 
{file = "lxml-4.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d1a92d8e90b286d491e5626af53afef2ba04da33e82e30744795c71880eaa21"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:b570da8cd0012f4af9fa76a5635cd31f707473e65a5a335b186069d5c7121ff2"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ef87fca280fb15342726bd5f980f6faf8b84a5287fcc2d4962ea8af88b35130"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:93e414e3206779ef41e5ff2448067213febf260ba747fc65389a3ddaa3fb8715"}, - {file = "lxml-4.9.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6653071f4f9bac46fbc30f3c7838b0e9063ee335908c5d61fb7a4a86c8fd2036"}, - {file = "lxml-4.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:32a73c53783becdb7eaf75a2a1525ea8e49379fb7248c3eeefb9412123536387"}, - {file = "lxml-4.9.1-cp38-cp38-win32.whl", hash = "sha256:1a7c59c6ffd6ef5db362b798f350e24ab2cfa5700d53ac6681918f314a4d3b94"}, - {file = "lxml-4.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:1436cf0063bba7888e43f1ba8d58824f085410ea2025befe81150aceb123e345"}, - {file = "lxml-4.9.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:4beea0f31491bc086991b97517b9683e5cfb369205dac0148ef685ac12a20a67"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:41fb58868b816c202e8881fd0f179a4644ce6e7cbbb248ef0283a34b73ec73bb"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:bd34f6d1810d9354dc7e35158aa6cc33456be7706df4420819af6ed966e85448"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:edffbe3c510d8f4bf8640e02ca019e48a9b72357318383ca60e3330c23aaffc7"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6d949f53ad4fc7cf02c44d6678e7ff05ec5f5552b235b9e136bd52e9bf730b91"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:079b68f197c796e42aa80b1f739f058dcee796dc725cc9a1be0cdb08fc45b000"}, - {file = "lxml-4.9.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9c3a88d20e4fe4a2a4a84bf439a5ac9c9aba400b85244c63a1ab7088f85d9d25"}, - {file = "lxml-4.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4e285b5f2bf321fc0857b491b5028c5f276ec0c873b985d58d7748ece1d770dd"}, - {file = "lxml-4.9.1-cp39-cp39-win32.whl", hash = "sha256:ef72013e20dd5ba86a8ae1aed7f56f31d3374189aa8b433e7b12ad182c0d2dfb"}, - {file = "lxml-4.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:10d2017f9150248563bb579cd0d07c61c58da85c922b780060dcc9a3aa9f432d"}, - {file = "lxml-4.9.1-pp37-pypy37_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0538747a9d7827ce3e16a8fdd201a99e661c7dee3c96c885d8ecba3c35d1032c"}, - {file = "lxml-4.9.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:0645e934e940107e2fdbe7c5b6fb8ec6232444260752598bc4d09511bd056c0b"}, - {file = "lxml-4.9.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6daa662aba22ef3258934105be2dd9afa5bb45748f4f702a3b39a5bf53a1f4dc"}, - {file = "lxml-4.9.1-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:603a464c2e67d8a546ddaa206d98e3246e5db05594b97db844c2f0a1af37cf5b"}, - {file = 
"lxml-4.9.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c4b2e0559b68455c085fb0f6178e9752c4be3bba104d6e881eb5573b399d1eb2"}, - {file = "lxml-4.9.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0f3f0059891d3254c7b5fb935330d6db38d6519ecd238ca4fce93c234b4a0f73"}, - {file = "lxml-4.9.1-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c852b1530083a620cb0de5f3cd6826f19862bafeaf77586f1aef326e49d95f0c"}, - {file = "lxml-4.9.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:287605bede6bd36e930577c5925fcea17cb30453d96a7b4c63c14a257118dbb9"}, - {file = "lxml-4.9.1.tar.gz", hash = "sha256:fe749b052bb7233fe5d072fcb549221a8cb1a16725c47c37e42b0b9cb3ff2c3f"}, -] -mako = [ - {file = "Mako-1.2.3-py3-none-any.whl", hash = "sha256:c413a086e38cd885088d5e165305ee8eed04e8b3f8f62df343480da0a385735f"}, - {file = "Mako-1.2.3.tar.gz", hash = "sha256:7fde96466fcfeedb0eed94f187f20b23d85e4cb41444be0e542e2c8c65c396cd"}, -] -markupsafe = [ - {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, - 
{file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, - {file = 
"MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, - {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, -] -marshmallow = [ - {file = "marshmallow-3.18.0-py3-none-any.whl", hash = "sha256:35e02a3a06899c9119b785c12a22f4cda361745d66a71ab691fd7610202ae104"}, - {file = "marshmallow-3.18.0.tar.gz", hash = "sha256:6804c16114f7fce1f5b4dadc31f4674af23317fcc7f075da21e35c1a35d781f7"}, -] -marshmallow-enum = [ - {file = "marshmallow-enum-1.5.1.tar.gz", hash = "sha256:38e697e11f45a8e64b4a1e664000897c659b60aa57bfa18d44e226a9920b6e58"}, - {file = "marshmallow_enum-1.5.1-py2.py3-none-any.whl", hash = "sha256:57161ab3dbfde4f57adeb12090f39592e992b9c86d206d02f6bd03ebec60f072"}, -] -marshmallow-sqlalchemy = [ - {file = "marshmallow-sqlalchemy-0.28.1.tar.gz", hash = "sha256:aa376747296780a56355e3067b9c8bf43a2a1c44ff985de82b3a5d9e161ca2b8"}, - {file = "marshmallow_sqlalchemy-0.28.1-py2.py3-none-any.whl", hash = "sha256:dbb061c19375eca3a7d18358d2ca8bbaee825fc3000a3f114e2698282362b536"}, + {file = "identify-2.5.22-py2.py3-none-any.whl", hash = "sha256:f0faad595a4687053669c112004178149f6c326db71ee999ae4636685753ad2f"}, + {file = "identify-2.5.22.tar.gz", hash = "sha256:f7a93d6cf98e29bd07663c60728e7a4057615068d7a639d132dc883b2d54d31e"}, ] mccabe = [ {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, ] -mypy = [ - {file = "mypy-0.982-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5085e6f442003fa915aeb0a46d4da58128da69325d8213b4b35cc7054090aed5"}, - {file = "mypy-0.982-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:41fd1cf9bc0e1c19b9af13a6580ccb66c381a5ee2cf63ee5ebab747a4badeba3"}, - {file = "mypy-0.982-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f793e3dd95e166b66d50e7b63e69e58e88643d80a3dcc3bcd81368e0478b089c"}, - {file = "mypy-0.982-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86ebe67adf4d021b28c3f547da6aa2cce660b57f0432617af2cca932d4d378a6"}, - {file = "mypy-0.982-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:175f292f649a3af7082fe36620369ffc4661a71005aa9f8297ea473df5772046"}, - {file = "mypy-0.982-cp310-cp310-win_amd64.whl", hash = "sha256:8ee8c2472e96beb1045e9081de8e92f295b89ac10c4109afdf3a23ad6e644f3e"}, - {file = "mypy-0.982-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58f27ebafe726a8e5ccb58d896451dd9a662a511a3188ff6a8a6a919142ecc20"}, - {file = "mypy-0.982-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6af646bd46f10d53834a8e8983e130e47d8ab2d4b7a97363e35b24e1d588947"}, - {file = "mypy-0.982-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e7aeaa763c7ab86d5b66ff27f68493d672e44c8099af636d433a7f3fa5596d40"}, - {file = "mypy-0.982-cp37-cp37m-win_amd64.whl", hash = "sha256:724d36be56444f569c20a629d1d4ee0cb0ad666078d59bb84f8f887952511ca1"}, - {file = "mypy-0.982-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:14d53cdd4cf93765aa747a7399f0961a365bcddf7855d9cef6306fa41de01c24"}, - {file = "mypy-0.982-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:26ae64555d480ad4b32a267d10cab7aec92ff44de35a7cd95b2b7cb8e64ebe3e"}, - {file = "mypy-0.982-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6389af3e204975d6658de4fb8ac16f58c14e1bacc6142fee86d1b5b26aa52bda"}, - {file = 
"mypy-0.982-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b35ce03a289480d6544aac85fa3674f493f323d80ea7226410ed065cd46f206"}, - {file = "mypy-0.982-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c6e564f035d25c99fd2b863e13049744d96bd1947e3d3d2f16f5828864506763"}, - {file = "mypy-0.982-cp38-cp38-win_amd64.whl", hash = "sha256:cebca7fd333f90b61b3ef7f217ff75ce2e287482206ef4a8b18f32b49927b1a2"}, - {file = "mypy-0.982-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a705a93670c8b74769496280d2fe6cd59961506c64f329bb179970ff1d24f9f8"}, - {file = "mypy-0.982-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:75838c649290d83a2b83a88288c1eb60fe7a05b36d46cbea9d22efc790002146"}, - {file = "mypy-0.982-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:91781eff1f3f2607519c8b0e8518aad8498af1419e8442d5d0afb108059881fc"}, - {file = "mypy-0.982-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaa97b9ddd1dd9901a22a879491dbb951b5dec75c3b90032e2baa7336777363b"}, - {file = "mypy-0.982-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a692a8e7d07abe5f4b2dd32d731812a0175626a90a223d4b58f10f458747dd8a"}, - {file = "mypy-0.982-cp39-cp39-win_amd64.whl", hash = "sha256:eb7a068e503be3543c4bd329c994103874fa543c1727ba5288393c21d912d795"}, - {file = "mypy-0.982-py3-none-any.whl", hash = "sha256:1021c241e8b6e1ca5a47e4d52601274ac078a89845cfde66c6d5f769819ffa1d"}, - {file = "mypy-0.982.tar.gz", hash = "sha256:85f7a343542dc8b1ed0a888cdd34dca56462654ef23aa673907305b260b3d746"}, -] mypy-extensions = [ - {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, - {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, -] -mysql-connector-python = [ - {file = "mysql-connector-python-8.0.31.tar.gz", hash = "sha256:0fbe8f5441ad781b4f65c54a10ac77c6a329591456607e042786528599519636"}, - {file = "mysql_connector_python-8.0.31-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3e271d8de00d5e9f9bd4b212c8e23d2986dead0f20379010f3b274a3e24cbfcb"}, - {file = "mysql_connector_python-8.0.31-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:f3ee04a601f9cb90ace9618bbe2fa8e5bb59be3eb0c2bd8a5405fe69e05e446b"}, - {file = "mysql_connector_python-8.0.31-cp310-cp310-manylinux1_i686.whl", hash = "sha256:f89b7a731885b8a04248e4d8d124705ca836f0ddd3b7cf0c789e21f4b32810ed"}, - {file = "mysql_connector_python-8.0.31-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:48eb34f4e69a2fba56f310de6682862a15d46cd2bd51ee6eebc3a244e4ee0aa6"}, - {file = "mysql_connector_python-8.0.31-cp310-cp310-win_amd64.whl", hash = "sha256:a570a72e0015b36b9c0775ae27c1d4946225f02f62129d16a14e9d77a38c0717"}, - {file = "mysql_connector_python-8.0.31-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a7ac859a52486ac319e37f61469bbb9023faef38018223efa74e953f1fe23d36"}, - {file = "mysql_connector_python-8.0.31-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:79d6a6e8ce955df5ca0786cb8ed8fbd999745c9b50def89993a2a0f4732de721"}, - {file = "mysql_connector_python-8.0.31-cp311-cp311-manylinux1_i686.whl", hash = "sha256:e60426af313dcd526028d018d70757a82c5cc0673776b2a614e2180b5970feed"}, - {file = "mysql_connector_python-8.0.31-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:d0ca1ba3e5fb2f2cddcf271c320cd5c368f8d392c034ddab7a1c8dfd19510351"}, - {file = "mysql_connector_python-8.0.31-cp311-cp311-win_amd64.whl", hash = 
"sha256:a1d8c1509c740649f352400d50360185e5473371507bb6498ceda0c6e877920c"}, - {file = "mysql_connector_python-8.0.31-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:447847396d1b51edd9cfe05a8c5ba82836d8ea4866f25f36a836cab322fdc4f0"}, - {file = "mysql_connector_python-8.0.31-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:5e01a2f50378c13407a32e40dd4d225cfee5996d9d11968f76720ec28aa45421"}, - {file = "mysql_connector_python-8.0.31-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:ac85883ec3b3a9a0e36cacc89b8f5e666206842c432a5f69b09a7687ddf51d4a"}, - {file = "mysql_connector_python-8.0.31-cp37-cp37m-win_amd64.whl", hash = "sha256:28cb3667be64ebfbd3d477bbd2c71e50d48bd5ed7ba2072dd460ae886d27e88e"}, - {file = "mysql_connector_python-8.0.31-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:30f4542d4d20357c79604e6bf1a801e71dfc45c759c22b502ca5aa8122c3e859"}, - {file = "mysql_connector_python-8.0.31-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:e9e5ad544adfc82ffbda2c74685c8c953bce2e212c56f117020079f05e2c68b2"}, - {file = "mysql_connector_python-8.0.31-cp38-cp38-manylinux1_i686.whl", hash = "sha256:744c976569e81eecce5e8c7e8f80df2a1c3f64414829addc69c64aef8f56d091"}, - {file = "mysql_connector_python-8.0.31-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:17d6ea22dacca7fa78a73a81f2b186d4c5c6e70b7be314e352526654e9ba4713"}, - {file = "mysql_connector_python-8.0.31-cp38-cp38-win_amd64.whl", hash = "sha256:ae1b3d03802474a161cce8a97024484d18bef43b86d20114908cbc263817cade"}, - {file = "mysql_connector_python-8.0.31-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:746df133c677fbe4687da33aad5a711abdd9bd2277bbc350e20f903f07c81ef5"}, - {file = "mysql_connector_python-8.0.31-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:4d75e6c3a7f18004e8279cbd9f5edc70089d6aaf3cb64374e21098d9bf0b93c4"}, - {file = "mysql_connector_python-8.0.31-cp39-cp39-manylinux1_i686.whl", hash = "sha256:8ad0d08f3f7c9e48d6d102c7de718e5e44f630f916ff2f4b4ff8a3756b5d10ac"}, - {file = "mysql_connector_python-8.0.31-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:02526f16eacc3961ff681c5c8455d2306a9b45124f2f012ca75a1eac9ceb5165"}, - {file = "mysql_connector_python-8.0.31-cp39-cp39-win_amd64.whl", hash = "sha256:b2bbf443f6346e46c26a3e91dd96a428a1038f2d3c5e466541078479c64a1833"}, - {file = "mysql_connector_python-8.0.31-py2.py3-none-any.whl", hash = "sha256:9be9c4dcae987a2a3f07b2ad984984c24f90887dbfab3c8a971e631ad4ca5ccf"}, + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] nodeenv = [ {file = "nodeenv-1.7.0-py2.py3-none-any.whl", hash = "sha256:27083a7b96a25f2f5e1d8cb4b6317ee8aeda3bdd121394e5ac54e498028a042e"}, {file = "nodeenv-1.7.0.tar.gz", hash = "sha256:e0e7f7dfb85fc5394c6fe1e8fa98131a2473e04311a45afb6508f7cf1836fa2b"}, ] packaging = [ - {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, - {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, + {file = "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"}, + {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"}, ] pathspec = [ - {file = "pathspec-0.10.1-py3-none-any.whl", hash = 
"sha256:46846318467efc4556ccfd27816e004270a9eeeeb4d062ce5e6fc7a87c573f93"}, - {file = "pathspec-0.10.1.tar.gz", hash = "sha256:7ace6161b621d31e7902eb6b5ae148d12cfd23f4a249b9ffb6b9fee12084323d"}, + {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"}, + {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"}, ] pbr = [ - {file = "pbr-5.11.0-py2.py3-none-any.whl", hash = "sha256:db2317ff07c84c4c63648c9064a79fe9d9f5c7ce85a9099d4b6258b3db83225a"}, - {file = "pbr-5.11.0.tar.gz", hash = "sha256:b97bc6695b2aff02144133c2e7399d5885223d42b7912ffaec2ca3898e673bfe"}, -] -pep8-naming = [ - {file = "pep8-naming-0.13.2.tar.gz", hash = "sha256:93eef62f525fd12a6f8c98f4dcc17fa70baae2f37fa1f73bec00e3e44392fa48"}, - {file = "pep8_naming-0.13.2-py3-none-any.whl", hash = "sha256:59e29e55c478db69cffbe14ab24b5bd2cd615c0413edf790d47d3fb7ba9a4e23"}, + {file = "pbr-5.11.1-py2.py3-none-any.whl", hash = "sha256:567f09558bae2b3ab53cb3c1e2e33e726ff3338e7bae3db5dc954b3a44eef12b"}, + {file = "pbr-5.11.1.tar.gz", hash = "sha256:aefc51675b0b533d56bb5fd1c8c6c0522fe31896679882e1c4c63d5e4a0fccb3"}, ] platformdirs = [ - {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, - {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, -] -pluggy = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, + {file = "platformdirs-3.2.0-py3-none-any.whl", hash = "sha256:ebe11c0d7a805086e99506aa331612429a72ca7cd52a1f0d277dc4adc20cb10e"}, + {file = "platformdirs-3.2.0.tar.gz", hash = "sha256:d5b638ca397f25f979350ff789db335903d7ea010ab28903f57b27e1b16c2b08"}, ] pre-commit = [ - {file = "pre_commit-2.20.0-py2.py3-none-any.whl", hash = "sha256:51a5ba7c480ae8072ecdb6933df22d2f812dc897d5fe848778116129a681aac7"}, - {file = "pre_commit-2.20.0.tar.gz", hash = "sha256:a978dac7bc9ec0bcee55c18a277d553b0f419d259dadb4b9418ff2d00eb43959"}, -] -pre-commit-hooks = [ - {file = "pre_commit_hooks-4.3.0-py2.py3-none-any.whl", hash = "sha256:9ccaf7c98794659d345080ee1ea0256a55ae059675045eebdbbc17c0be8c7e4b"}, - {file = "pre_commit_hooks-4.3.0.tar.gz", hash = "sha256:fda598a4c834d030727e6a615722718b47510f4bed72df4c949f95ba9f5aaf88"}, -] -prompt-toolkit = [ - {file = "prompt_toolkit-3.0.31-py3-none-any.whl", hash = "sha256:9696f386133df0fc8ca5af4895afe5d78f5fcfe5258111c2a79a1c3e41ffa96d"}, - {file = "prompt_toolkit-3.0.31.tar.gz", hash = "sha256:9ada952c9d1787f52ff6d5f3484d0b4df8952787c087edf6a1f7c2cb1ea88148"}, -] -protobuf = [ - {file = "protobuf-3.20.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3cc797c9d15d7689ed507b165cd05913acb992d78b379f6014e013f9ecb20996"}, - {file = "protobuf-3.20.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:ff8d8fa42675249bb456f5db06c00de6c2f4c27a065955917b28c4f15978b9c3"}, - {file = "protobuf-3.20.1-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cd68be2559e2a3b84f517fb029ee611546f7812b1fdd0aa2ecc9bc6ec0e4fdde"}, - {file = "protobuf-3.20.1-cp310-cp310-win32.whl", hash = "sha256:9016d01c91e8e625141d24ec1b20fed584703e527d28512aa8c8707f105a683c"}, - {file = "protobuf-3.20.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:32ca378605b41fd180dfe4e14d3226386d8d1b002ab31c969c366549e66a2bb7"}, - {file = "protobuf-3.20.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9be73ad47579abc26c12024239d3540e6b765182a91dbc88e23658ab71767153"}, - {file = "protobuf-3.20.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:097c5d8a9808302fb0da7e20edf0b8d4703274d140fd25c5edabddcde43e081f"}, - {file = "protobuf-3.20.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e250a42f15bf9d5b09fe1b293bdba2801cd520a9f5ea2d7fb7536d4441811d20"}, - {file = "protobuf-3.20.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:cdee09140e1cd184ba9324ec1df410e7147242b94b5f8b0c64fc89e38a8ba531"}, - {file = "protobuf-3.20.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:af0ebadc74e281a517141daad9d0f2c5d93ab78e9d455113719a45a49da9db4e"}, - {file = "protobuf-3.20.1-cp37-cp37m-win32.whl", hash = "sha256:755f3aee41354ae395e104d62119cb223339a8f3276a0cd009ffabfcdd46bb0c"}, - {file = "protobuf-3.20.1-cp37-cp37m-win_amd64.whl", hash = "sha256:62f1b5c4cd6c5402b4e2d63804ba49a327e0c386c99b1675c8a0fefda23b2067"}, - {file = "protobuf-3.20.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:06059eb6953ff01e56a25cd02cca1a9649a75a7e65397b5b9b4e929ed71d10cf"}, - {file = "protobuf-3.20.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:cb29edb9eab15742d791e1025dd7b6a8f6fcb53802ad2f6e3adcb102051063ab"}, - {file = "protobuf-3.20.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:69ccfdf3657ba59569c64295b7d51325f91af586f8d5793b734260dfe2e94e2c"}, - {file = "protobuf-3.20.1-cp38-cp38-win32.whl", hash = "sha256:dd5789b2948ca702c17027c84c2accb552fc30f4622a98ab5c51fcfe8c50d3e7"}, - {file = "protobuf-3.20.1-cp38-cp38-win_amd64.whl", hash = "sha256:77053d28427a29987ca9caf7b72ccafee011257561259faba8dd308fda9a8739"}, - {file = "protobuf-3.20.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6f50601512a3d23625d8a85b1638d914a0970f17920ff39cec63aaef80a93fb7"}, - {file = "protobuf-3.20.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:284f86a6207c897542d7e956eb243a36bb8f9564c1742b253462386e96c6b78f"}, - {file = "protobuf-3.20.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7403941f6d0992d40161aa8bb23e12575637008a5a02283a930addc0508982f9"}, - {file = "protobuf-3.20.1-cp39-cp39-win32.whl", hash = "sha256:db977c4ca738dd9ce508557d4fce0f5aebd105e158c725beec86feb1f6bc20d8"}, - {file = "protobuf-3.20.1-cp39-cp39-win_amd64.whl", hash = "sha256:7e371f10abe57cee5021797126c93479f59fccc9693dafd6bd5633ab67808a91"}, - {file = "protobuf-3.20.1-py2.py3-none-any.whl", hash = "sha256:adfc6cf69c7f8c50fd24c793964eef18f0ac321315439d94945820612849c388"}, - {file = "protobuf-3.20.1.tar.gz", hash = "sha256:adc31566d027f45efe3f44eeb5b1f329da43891634d61c75a5944e9be6dd42c9"}, -] -psycopg2 = [ - {file = "psycopg2-2.9.5-cp310-cp310-win32.whl", hash = "sha256:d3ef67e630b0de0779c42912fe2cbae3805ebaba30cda27fea2a3de650a9414f"}, - {file = "psycopg2-2.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:4cb9936316d88bfab614666eb9e32995e794ed0f8f6b3b718666c22819c1d7ee"}, - {file = "psycopg2-2.9.5-cp311-cp311-win32.whl", hash = "sha256:093e3894d2d3c592ab0945d9eba9d139c139664dcf83a1c440b8a7aa9bb21955"}, - {file = "psycopg2-2.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:920bf418000dd17669d2904472efeab2b20546efd0548139618f8fa305d1d7ad"}, - {file = "psycopg2-2.9.5-cp36-cp36m-win32.whl", hash = "sha256:b9ac1b0d8ecc49e05e4e182694f418d27f3aedcfca854ebd6c05bb1cffa10d6d"}, - {file = 
"psycopg2-2.9.5-cp36-cp36m-win_amd64.whl", hash = "sha256:fc04dd5189b90d825509caa510f20d1d504761e78b8dfb95a0ede180f71d50e5"}, - {file = "psycopg2-2.9.5-cp37-cp37m-win32.whl", hash = "sha256:922cc5f0b98a5f2b1ff481f5551b95cd04580fd6f0c72d9b22e6c0145a4840e0"}, - {file = "psycopg2-2.9.5-cp37-cp37m-win_amd64.whl", hash = "sha256:1e5a38aa85bd660c53947bd28aeaafb6a97d70423606f1ccb044a03a1203fe4a"}, - {file = "psycopg2-2.9.5-cp38-cp38-win32.whl", hash = "sha256:f5b6320dbc3cf6cfb9f25308286f9f7ab464e65cfb105b64cc9c52831748ced2"}, - {file = "psycopg2-2.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:1a5c7d7d577e0eabfcf15eb87d1e19314c8c4f0e722a301f98e0e3a65e238b4e"}, - {file = "psycopg2-2.9.5-cp39-cp39-win32.whl", hash = "sha256:322fd5fca0b1113677089d4ebd5222c964b1760e361f151cbb2706c4912112c5"}, - {file = "psycopg2-2.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:190d51e8c1b25a47484e52a79638a8182451d6f6dff99f26ad9bd81e5359a0fa"}, - {file = "psycopg2-2.9.5.tar.gz", hash = "sha256:a5246d2e683a972e2187a8714b5c2cf8156c064629f9a9b1a873c1730d9e245a"}, -] -pyasn1 = [ - {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"}, - {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"}, + {file = "pre_commit-2.21.0-py2.py3-none-any.whl", hash = "sha256:e2f91727039fc39a92f58a588a25b87f936de6567eed4f0e673e0507edc75bad"}, + {file = "pre_commit-2.21.0.tar.gz", hash = "sha256:31ef31af7e474a8d8995027fefdfcf509b5c913ff31f2015b4ec4beb26a6f658"}, ] pycodestyle = [ {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, ] pydocstyle = [ - {file = "pydocstyle-6.1.1-py3-none-any.whl", hash = "sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"}, - {file = "pydocstyle-6.1.1.tar.gz", hash = "sha256:1d41b7c459ba0ee6c345f2eb9ae827cab14a7533a88c5c6f7e94923f72df92dc"}, + {file = "pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"}, + {file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"}, ] pyflakes = [ {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, ] pygments = [ - {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, - {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, -] -pyjwt = [ - {file = "PyJWT-2.6.0-py3-none-any.whl", hash = "sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14"}, - {file = "PyJWT-2.6.0.tar.gz", hash = "sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd"}, -] -pyparsing = [ - {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, - {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, -] -pyrsistent = [ - {file = "pyrsistent-0.18.1-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1"}, - {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26"}, - {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ed6784ceac462a7d6fcb7e9b663e93b9a6fb373b7f43594f9ff68875788e01e"}, - {file = "pyrsistent-0.18.1-cp310-cp310-win32.whl", hash = "sha256:e4f3149fd5eb9b285d6bfb54d2e5173f6a116fe19172686797c056672689daf6"}, - {file = "pyrsistent-0.18.1-cp310-cp310-win_amd64.whl", hash = "sha256:636ce2dc235046ccd3d8c56a7ad54e99d5c1cd0ef07d9ae847306c91d11b5fec"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e92a52c166426efbe0d1ec1332ee9119b6d32fc1f0bbfd55d5c1088070e7fc1b"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7a096646eab884bf8bed965bad63ea327e0d0c38989fc83c5ea7b8a87037bfc"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cdfd2c361b8a8e5d9499b9082b501c452ade8bbf42aef97ea04854f4a3f43b22"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-win32.whl", hash = "sha256:7ec335fc998faa4febe75cc5268a9eac0478b3f681602c1f27befaf2a1abe1d8"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6455fc599df93d1f60e1c5c4fe471499f08d190d57eca040c0ea182301321286"}, - {file = "pyrsistent-0.18.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fd8da6d0124efa2f67d86fa70c851022f87c98e205f0594e1fae044e7119a5a6"}, - {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bfe2388663fd18bd8ce7db2c91c7400bf3e1a9e8bd7d63bf7e77d39051b85ec"}, - {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e3e1fcc45199df76053026a51cc59ab2ea3fc7c094c6627e93b7b44cdae2c8c"}, - {file = "pyrsistent-0.18.1-cp38-cp38-win32.whl", hash = "sha256:b568f35ad53a7b07ed9b1b2bae09eb15cdd671a5ba5d2c66caee40dbf91c68ca"}, - {file = "pyrsistent-0.18.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1b96547410f76078eaf66d282ddca2e4baae8964364abb4f4dcdde855cd123a"}, - {file = "pyrsistent-0.18.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f87cc2863ef33c709e237d4b5f4502a62a00fab450c9e020892e8e2ede5847f5"}, - {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bc66318fb7ee012071b2792024564973ecc80e9522842eb4e17743604b5e045"}, - {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:914474c9f1d93080338ace89cb2acee74f4f666fb0424896fcfb8d86058bf17c"}, - {file = "pyrsistent-0.18.1-cp39-cp39-win32.whl", hash = "sha256:1b34eedd6812bf4d33814fca1b66005805d3640ce53140ab8bbb1e2651b0d9bc"}, - {file = "pyrsistent-0.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:e24a828f57e0c337c8d8bb9f6b12f09dfdf0273da25fda9e314f0b684b415a07"}, - {file = "pyrsistent-0.18.1.tar.gz", hash = "sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96"}, -] -pytest = [ - {file = "pytest-7.2.0-py3-none-any.whl", hash = "sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71"}, - {file = "pytest-7.2.0.tar.gz", hash = "sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59"}, -] -pytest-flask = [ - {file = 
"pytest-flask-1.2.0.tar.gz", hash = "sha256:46fde652f77777bf02dc91205aec4ce20cdf2acbbbd66a918ab91f5c14693d3d"}, - {file = "pytest_flask-1.2.0-py3-none-any.whl", hash = "sha256:fe25b39ad0db09c3d1fe728edecf97ced85e774c775db259a6d25f0270a4e7c9"}, -] -pytest-flask-sqlalchemy = [ - {file = "pytest-flask-sqlalchemy-1.1.0.tar.gz", hash = "sha256:db71a57b90435e5d854b21c37a2584056d6fc3ddb28c09d8d0a2546bd6e390ff"}, - {file = "pytest_flask_sqlalchemy-1.1.0-py3-none-any.whl", hash = "sha256:b9f272d5c4092fcbe4a6284e402a37cad84f5b9be3c0bbe1a11927f24c99ff83"}, -] -pytest-mock = [ - {file = "pytest-mock-3.10.0.tar.gz", hash = "sha256:fbbdb085ef7c252a326fd8cdcac0aa3b1333d8811f131bdcc701002e1be7ed4f"}, - {file = "pytest_mock-3.10.0-py3-none-any.whl", hash = "sha256:f4c973eeae0282963eb293eb173ce91b091a79c1334455acfac9ddee8a1c784b"}, -] -python-jose = [ - {file = "python-jose-3.3.0.tar.gz", hash = "sha256:55779b5e6ad599c6336191246e95eb2293a9ddebd555f796a65f838f07e5d78a"}, - {file = "python_jose-3.3.0-py2.py3-none-any.whl", hash = "sha256:9b1376b023f8b298536eedd47ae1089bcdb848f1535ab30555cd92002d78923a"}, -] -python-keycloak = [ - {file = "python-keycloak-2.6.0.tar.gz", hash = "sha256:08c530ff86f631faccb8033d9d9345cc3148cb2cf132ff7564f025292e4dbd96"}, - {file = "python_keycloak-2.6.0-py3-none-any.whl", hash = "sha256:a1ce102b978beb56d385319b3ca20992b915c2c12d15a2d0c23f1104882f3fb6"}, -] -pytz = [ - {file = "pytz-2022.5-py2.py3-none-any.whl", hash = "sha256:335ab46900b1465e714b4fda4963d87363264eb662aab5e65da039c25f1f5b22"}, - {file = "pytz-2022.5.tar.gz", hash = "sha256:c4d88f472f54d615e9cd582a5004d1e5f624854a6a27a6211591c251f22a6914"}, -] -pytz-deprecation-shim = [ - {file = "pytz_deprecation_shim-0.1.0.post0-py2.py3-none-any.whl", hash = "sha256:8314c9692a636c8eb3bda879b9f119e350e93223ae83e70e80c31675a0fdc1a6"}, - {file = "pytz_deprecation_shim-0.1.0.post0.tar.gz", hash = "sha256:af097bae1b616dde5c5744441e2ddc69e74dfdcb0c263129610d85b87445a59d"}, -] -pyupgrade = [ - {file = "pyupgrade-3.1.0-py2.py3-none-any.whl", hash = "sha256:77c6101a710be3e24804891e43388cedbee617258e93b09c8c5e58de08617758"}, - {file = "pyupgrade-3.1.0.tar.gz", hash = "sha256:7a8d393d85e15e0e2753e90b7b2e173b9d29dfd71e61f93d93e985b242627ed3"}, + {file = "Pygments-2.14.0-py3-none-any.whl", hash = "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717"}, + {file = "Pygments-2.14.0.tar.gz", hash = "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297"}, ] pyyaml = [ {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, @@ -2917,111 +604,12 @@ pyyaml = [ {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, ] -reorder-python-imports = [ - {file = "reorder_python_imports-3.9.0-py2.py3-none-any.whl", hash = "sha256:3f9c16e8781f54c944756d0d1eb34a8c863554f7a4eb3693f574fe19b1a29b56"}, - {file = "reorder_python_imports-3.9.0.tar.gz", hash = "sha256:49292ed537829a6bece9fb3746fc1bbe98f52643be5de01a4e13680268a5b0ec"}, -] -requests = [ - {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, - {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, -] -requests-toolbelt = [ - {file = "requests-toolbelt-0.9.1.tar.gz", 
hash = "sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0"}, - {file = "requests_toolbelt-0.9.1-py2.py3-none-any.whl", hash = "sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f"}, -] -restrictedpython = [ - {file = "RestrictedPython-6.0-py3-none-any.whl", hash = "sha256:3479303f7bff48a7dedad76f96e7704993c5e86c5adbd67f607295d5352f0fb8"}, - {file = "RestrictedPython-6.0.tar.gz", hash = "sha256:405cf0bd9eec2f19b1326b5f48228efe56d6590b4e91826b8cc3b2cd400a96ad"}, -] restructuredtext-lint = [ {file = "restructuredtext_lint-1.4.0.tar.gz", hash = "sha256:1b235c0c922341ab6c530390892eb9e92f90b9b75046063e047cacfb0f050c45"}, ] -rsa = [ - {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, - {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, -] -ruamel-yaml = [ - {file = "ruamel.yaml-0.17.21-py3-none-any.whl", hash = "sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7"}, - {file = "ruamel.yaml-0.17.21.tar.gz", hash = "sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af"}, -] -safety = [ - {file = "safety-2.3.1-py3-none-any.whl", hash = "sha256:8f098d12b607db2756886280e85c28ece8db1bba4f45fc5f981f4663217bd619"}, - {file = "safety-2.3.1.tar.gz", hash = "sha256:6e6fcb7d4e8321098cf289f59b65051cafd3467f089c6e57c9f894ae32c23b71"}, -] -sentry-sdk = [ - {file = "sentry-sdk-1.10.1.tar.gz", hash = "sha256:105faf7bd7b7fa25653404619ee261527266b14103fe1389e0ce077bd23a9691"}, - {file = "sentry_sdk-1.10.1-py2.py3-none-any.whl", hash = "sha256:06c0fa9ccfdc80d7e3b5d2021978d6eb9351fa49db9b5847cf4d1f2a473414ad"}, -] setuptools = [ - {file = "setuptools-65.5.0-py3-none-any.whl", hash = "sha256:f62ea9da9ed6289bfe868cd6845968a2c854d1427f8548d52cae02a42b4f0356"}, - {file = "setuptools-65.5.0.tar.gz", hash = "sha256:512e5536220e38146176efb833d4a62aa726b7bbff82cfbc8ba9eaa3996e0b17"}, -] -simplejson = [ - {file = "simplejson-3.17.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a89acae02b2975b1f8e4974cb8cdf9bf9f6c91162fb8dec50c259ce700f2770a"}, - {file = "simplejson-3.17.6-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:82ff356ff91be0ab2293fc6d8d262451eb6ac4fd999244c4b5f863e049ba219c"}, - {file = "simplejson-3.17.6-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:0de783e9c2b87bdd75b57efa2b6260c24b94605b5c9843517577d40ee0c3cc8a"}, - {file = "simplejson-3.17.6-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:d24a9e61df7a7787b338a58abfba975414937b609eb6b18973e25f573bc0eeeb"}, - {file = "simplejson-3.17.6-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:e8603e691580487f11306ecb066c76f1f4a8b54fb3bdb23fa40643a059509366"}, - {file = "simplejson-3.17.6-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:9b01e7b00654115965a206e3015f0166674ec1e575198a62a977355597c0bef5"}, - {file = "simplejson-3.17.6-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:37bc0cf0e5599f36072077e56e248f3336917ded1d33d2688624d8ed3cefd7d2"}, - {file = "simplejson-3.17.6-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:cf6e7d5fe2aeb54898df18db1baf479863eae581cce05410f61f6b4188c8ada1"}, - {file = "simplejson-3.17.6-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:bdfc54b4468ed4cd7415928cbe782f4d782722a81aeb0f81e2ddca9932632211"}, - {file = "simplejson-3.17.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dd16302d39c4d6f4afde80edd0c97d4db643327d355a312762ccd9bd2ca515ed"}, - {file = 
"simplejson-3.17.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:deac4bdafa19bbb89edfb73b19f7f69a52d0b5bd3bb0c4ad404c1bbfd7b4b7fd"}, - {file = "simplejson-3.17.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8bbdb166e2fb816e43ab034c865147edafe28e1b19c72433147789ac83e2dda"}, - {file = "simplejson-3.17.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7854326920d41c3b5d468154318fe6ba4390cb2410480976787c640707e0180"}, - {file = "simplejson-3.17.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:04e31fa6ac8e326480703fb6ded1488bfa6f1d3f760d32e29dbf66d0838982ce"}, - {file = "simplejson-3.17.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f63600ec06982cdf480899026f4fda622776f5fabed9a869fdb32d72bc17e99a"}, - {file = "simplejson-3.17.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e03c3b8cc7883a54c3f34a6a135c4a17bc9088a33f36796acdb47162791b02f6"}, - {file = "simplejson-3.17.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a2d30d6c1652140181dc6861f564449ad71a45e4f165a6868c27d36745b65d40"}, - {file = "simplejson-3.17.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a1aa6e4cae8e3b8d5321be4f51c5ce77188faf7baa9fe1e78611f93a8eed2882"}, - {file = "simplejson-3.17.6-cp310-cp310-win32.whl", hash = "sha256:97202f939c3ff341fc3fa84d15db86156b1edc669424ba20b0a1fcd4a796a045"}, - {file = "simplejson-3.17.6-cp310-cp310-win_amd64.whl", hash = "sha256:80d3bc9944be1d73e5b1726c3bbfd2628d3d7fe2880711b1eb90b617b9b8ac70"}, - {file = "simplejson-3.17.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9fa621b3c0c05d965882c920347b6593751b7ab20d8fa81e426f1735ca1a9fc7"}, - {file = "simplejson-3.17.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd2fb11922f58df8528adfca123f6a84748ad17d066007e7ac977720063556bd"}, - {file = "simplejson-3.17.6-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:724c1fe135aa437d5126138d977004d165a3b5e2ee98fc4eb3e7c0ef645e7e27"}, - {file = "simplejson-3.17.6-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4ff4ac6ff3aa8f814ac0f50bf218a2e1a434a17aafad4f0400a57a8cc62ef17f"}, - {file = "simplejson-3.17.6-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:67093a526e42981fdd954868062e56c9b67fdd7e712616cc3265ad0c210ecb51"}, - {file = "simplejson-3.17.6-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:5d6b4af7ad7e4ac515bc6e602e7b79e2204e25dbd10ab3aa2beef3c5a9cad2c7"}, - {file = "simplejson-3.17.6-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:1c9b1ed7ed282b36571638297525f8ef80f34b3e2d600a56f962c6044f24200d"}, - {file = "simplejson-3.17.6-cp36-cp36m-win32.whl", hash = "sha256:632ecbbd2228575e6860c9e49ea3cc5423764d5aa70b92acc4e74096fb434044"}, - {file = "simplejson-3.17.6-cp36-cp36m-win_amd64.whl", hash = "sha256:4c09868ddb86bf79b1feb4e3e7e4a35cd6e61ddb3452b54e20cf296313622566"}, - {file = "simplejson-3.17.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4b6bd8144f15a491c662f06814bd8eaa54b17f26095bb775411f39bacaf66837"}, - {file = "simplejson-3.17.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5decdc78849617917c206b01e9fc1d694fd58caa961be816cb37d3150d613d9a"}, - {file = "simplejson-3.17.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:521877c7bd060470806eb6335926e27453d740ac1958eaf0d8c00911bc5e1802"}, - {file = "simplejson-3.17.6-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:65b998193bd7b0c7ecdfffbc825d808eac66279313cb67d8892bb259c9d91494"}, - {file = "simplejson-3.17.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ac786f6cb7aa10d44e9641c7a7d16d7f6e095b138795cd43503769d4154e0dc2"}, - {file = "simplejson-3.17.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3ff5b3464e1ce86a8de8c88e61d4836927d5595c2162cab22e96ff551b916e81"}, - {file = "simplejson-3.17.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:69bd56b1d257a91e763256d63606937ae4eb890b18a789b66951c00062afec33"}, - {file = "simplejson-3.17.6-cp37-cp37m-win32.whl", hash = "sha256:b81076552d34c27e5149a40187a8f7e2abb2d3185576a317aaf14aeeedad862a"}, - {file = "simplejson-3.17.6-cp37-cp37m-win_amd64.whl", hash = "sha256:07ecaafc1b1501f275bf5acdee34a4ad33c7c24ede287183ea77a02dc071e0c0"}, - {file = "simplejson-3.17.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:068670af975247acbb9fc3d5393293368cda17026db467bf7a51548ee8f17ee1"}, - {file = "simplejson-3.17.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4d1c135af0c72cb28dd259cf7ba218338f4dc027061262e46fe058b4e6a4c6a3"}, - {file = "simplejson-3.17.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:23fe704da910ff45e72543cbba152821685a889cf00fc58d5c8ee96a9bad5f94"}, - {file = "simplejson-3.17.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f444762fed1bc1fd75187ef14a20ed900c1fbb245d45be9e834b822a0223bc81"}, - {file = "simplejson-3.17.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:681eb4d37c9a9a6eb9b3245a5e89d7f7b2b9895590bb08a20aa598c1eb0a1d9d"}, - {file = "simplejson-3.17.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8e8607d8f6b4f9d46fee11447e334d6ab50e993dd4dbfb22f674616ce20907ab"}, - {file = "simplejson-3.17.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b10556817f09d46d420edd982dd0653940b90151d0576f09143a8e773459f6fe"}, - {file = "simplejson-3.17.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e1ec8a9ee0987d4524ffd6299e778c16cc35fef6d1a2764e609f90962f0b293a"}, - {file = "simplejson-3.17.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0b4126cac7d69ac06ff22efd3e0b3328a4a70624fcd6bca4fc1b4e6d9e2e12bf"}, - {file = "simplejson-3.17.6-cp38-cp38-win32.whl", hash = "sha256:35a49ebef25f1ebdef54262e54ae80904d8692367a9f208cdfbc38dbf649e00a"}, - {file = "simplejson-3.17.6-cp38-cp38-win_amd64.whl", hash = "sha256:743cd768affaa508a21499f4858c5b824ffa2e1394ed94eb85caf47ac0732198"}, - {file = "simplejson-3.17.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fb62d517a516128bacf08cb6a86ecd39fb06d08e7c4980251f5d5601d29989ba"}, - {file = "simplejson-3.17.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:12133863178a8080a3dccbf5cb2edfab0001bc41e5d6d2446af2a1131105adfe"}, - {file = "simplejson-3.17.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5540fba2d437edaf4aa4fbb80f43f42a8334206ad1ad3b27aef577fd989f20d9"}, - {file = "simplejson-3.17.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d74ee72b5071818a1a5dab47338e87f08a738cb938a3b0653b9e4d959ddd1fd9"}, - {file = "simplejson-3.17.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:28221620f4dcabdeac310846629b976e599a13f59abb21616356a85231ebd6ad"}, - {file = "simplejson-3.17.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b09bc62e5193e31d7f9876220fb429ec13a6a181a24d897b9edfbbdbcd678851"}, - {file = "simplejson-3.17.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7255a37ff50593c9b2f1afa8fafd6ef5763213c1ed5a9e2c6f5b9cc925ab979f"}, - {file = "simplejson-3.17.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:401d40969cee3df7bda211e57b903a534561b77a7ade0dd622a8d1a31eaa8ba7"}, - {file = "simplejson-3.17.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a649d0f66029c7eb67042b15374bd93a26aae202591d9afd71e111dd0006b198"}, - {file = "simplejson-3.17.6-cp39-cp39-win32.whl", hash = "sha256:522fad7be85de57430d6d287c4b635813932946ebf41b913fe7e880d154ade2e"}, - {file = "simplejson-3.17.6-cp39-cp39-win_amd64.whl", hash = "sha256:3fe87570168b2ae018391e2b43fbf66e8593a86feccb4b0500d134c998983ccc"}, - {file = "simplejson-3.17.6.tar.gz", hash = "sha256:cf98038d2abf63a1ada5730e91e84c642ba6c225b0198c3684151b1f80c5f8a6"}, -] -six = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, + {file = "setuptools-67.6.1-py3-none-any.whl", hash = "sha256:e728ca814a823bf7bf60162daf9db95b93d532948c4c0bea762ce62f60189078"}, + {file = "setuptools-67.6.1.tar.gz", hash = "sha256:257de92a9d50a60b8e22abfcbb771571fde0dbf3ec234463212027a4eeecbe9a"}, ] smmap = [ {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, @@ -3031,279 +619,11 @@ snowballstemmer = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, ] -soupsieve = [ - {file = "soupsieve-2.3.2.post1-py3-none-any.whl", hash = "sha256:3b2503d3c7084a42b1ebd08116e5f81aadfaea95863628c80a3b774a11b7c759"}, - {file = "soupsieve-2.3.2.post1.tar.gz", hash = "sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d"}, -] -sphinx = [ - {file = "Sphinx-5.3.0.tar.gz", hash = "sha256:51026de0a9ff9fc13c05d74913ad66047e104f56a129ff73e174eb5c3ee794b5"}, - {file = "sphinx-5.3.0-py3-none-any.whl", hash = "sha256:060ca5c9f7ba57a08a1219e547b269fadf125ae25b06b9fa7f66768efb652d6d"}, -] -sphinx-autoapi = [ - {file = "sphinx-autoapi-2.0.0.tar.gz", hash = "sha256:97dcf1b5b54cd0d8efef867594e4a4f3e2d3a2c0ec1e5a891e0a61bc77046006"}, - {file = "sphinx_autoapi-2.0.0-py2.py3-none-any.whl", hash = "sha256:dab2753a38cad907bf4e61473c0da365a26bfbe69fbf5aa6e4f7d48e1cf8a148"}, -] -sphinx-autobuild = [ - {file = "sphinx-autobuild-2021.3.14.tar.gz", hash = "sha256:de1ca3b66e271d2b5b5140c35034c89e47f263f2cd5db302c9217065f7443f05"}, - {file = "sphinx_autobuild-2021.3.14-py3-none-any.whl", hash = "sha256:8fe8cbfdb75db04475232f05187c776f46f6e9e04cacf1e49ce81bdac649ccac"}, -] -sphinx-basic-ng = [ - {file = "sphinx_basic_ng-1.0.0b1-py3-none-any.whl", hash = "sha256:ade597a3029c7865b24ad0eda88318766bcc2f9f4cef60df7e28126fde94db2a"}, - {file = "sphinx_basic_ng-1.0.0b1.tar.gz", hash = "sha256:89374bd3ccd9452a301786781e28c8718e99960f2d4f411845ea75fc7bb5a9b0"}, -] -sphinx-click = [ - {file = "sphinx-click-4.4.0.tar.gz", hash = 
"sha256:cc67692bd28f482c7f01531c61b64e9d2f069bfcf3d24cbbb51d4a84a749fa48"}, - {file = "sphinx_click-4.4.0-py3-none-any.whl", hash = "sha256:2821c10a68fc9ee6ce7c92fad26540d8d8c8f45e6d7258f0e4fb7529ae8fab49"}, -] -sphinxcontrib-applehelp = [ - {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, - {file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"}, -] -sphinxcontrib-devhelp = [ - {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, - {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, -] -sphinxcontrib-htmlhelp = [ - {file = "sphinxcontrib-htmlhelp-2.0.0.tar.gz", hash = "sha256:f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2"}, - {file = "sphinxcontrib_htmlhelp-2.0.0-py2.py3-none-any.whl", hash = "sha256:d412243dfb797ae3ec2b59eca0e52dac12e75a241bf0e4eb861e450d06c6ed07"}, -] -sphinxcontrib-jsmath = [ - {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, - {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, -] -sphinxcontrib-qthelp = [ - {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, - {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, -] -sphinxcontrib-serializinghtml = [ - {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, - {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, -] -SpiffWorkflow = [] -sqlalchemy = [ - {file = "SQLAlchemy-1.4.42-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:28e881266a172a4d3c5929182fde6bb6fba22ac93f137d5380cc78a11a9dd124"}, - {file = "SQLAlchemy-1.4.42-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:ca9389a00f639383c93ed00333ed763812f80b5ae9e772ea32f627043f8c9c88"}, - {file = "SQLAlchemy-1.4.42-cp27-cp27m-win32.whl", hash = "sha256:1d0c23ecf7b3bc81e29459c34a3f4c68ca538de01254e24718a7926810dc39a6"}, - {file = "SQLAlchemy-1.4.42-cp27-cp27m-win_amd64.whl", hash = "sha256:6c9d004eb78c71dd4d3ce625b80c96a827d2e67af9c0d32b1c1e75992a7916cc"}, - {file = "SQLAlchemy-1.4.42-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9e3a65ce9ed250b2f096f7b559fe3ee92e6605fab3099b661f0397a9ac7c8d95"}, - {file = "SQLAlchemy-1.4.42-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:2e56dfed0cc3e57b2f5c35719d64f4682ef26836b81067ee6cfad062290fd9e2"}, - {file = "SQLAlchemy-1.4.42-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b42c59ffd2d625b28cdb2ae4cde8488543d428cba17ff672a543062f7caee525"}, - {file = "SQLAlchemy-1.4.42-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:22459fc1718785d8a86171bbe7f01b5c9d7297301ac150f508d06e62a2b4e8d2"}, - {file = "SQLAlchemy-1.4.42-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:df76e9c60879fdc785a34a82bf1e8691716ffac32e7790d31a98d7dec6e81545"}, - {file = "SQLAlchemy-1.4.42-cp310-cp310-win32.whl", hash = "sha256:e7e740453f0149437c101ea4fdc7eea2689938c5760d7dcc436c863a12f1f565"}, - {file = "SQLAlchemy-1.4.42-cp310-cp310-win_amd64.whl", hash = "sha256:effc89e606165ca55f04f3f24b86d3e1c605e534bf1a96e4e077ce1b027d0b71"}, - {file = "SQLAlchemy-1.4.42-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:97ff50cd85bb907c2a14afb50157d0d5486a4b4639976b4a3346f34b6d1b5272"}, - {file = "SQLAlchemy-1.4.42-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e12c6949bae10f1012ab5c0ea52ab8db99adcb8c7b717938252137cdf694c775"}, - {file = "SQLAlchemy-1.4.42-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11b2ec26c5d2eefbc3e6dca4ec3d3d95028be62320b96d687b6e740424f83b7d"}, - {file = "SQLAlchemy-1.4.42-cp311-cp311-win32.whl", hash = "sha256:6045b3089195bc008aee5c273ec3ba9a93f6a55bc1b288841bd4cfac729b6516"}, - {file = "SQLAlchemy-1.4.42-cp311-cp311-win_amd64.whl", hash = "sha256:0501f74dd2745ec38f44c3a3900fb38b9db1ce21586b691482a19134062bf049"}, - {file = "SQLAlchemy-1.4.42-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:6e39e97102f8e26c6c8550cb368c724028c575ec8bc71afbbf8faaffe2b2092a"}, - {file = "SQLAlchemy-1.4.42-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15d878929c30e41fb3d757a5853b680a561974a0168cd33a750be4ab93181628"}, - {file = "SQLAlchemy-1.4.42-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fa5b7eb2051e857bf83bade0641628efe5a88de189390725d3e6033a1fff4257"}, - {file = "SQLAlchemy-1.4.42-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e1c5f8182b4f89628d782a183d44db51b5af84abd6ce17ebb9804355c88a7b5"}, - {file = "SQLAlchemy-1.4.42-cp36-cp36m-win32.whl", hash = "sha256:a7dd5b7b34a8ba8d181402d824b87c5cee8963cb2e23aa03dbfe8b1f1e417cde"}, - {file = "SQLAlchemy-1.4.42-cp36-cp36m-win_amd64.whl", hash = "sha256:5ede1495174e69e273fad68ad45b6d25c135c1ce67723e40f6cf536cb515e20b"}, - {file = "SQLAlchemy-1.4.42-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:9256563506e040daddccaa948d055e006e971771768df3bb01feeb4386c242b0"}, - {file = "SQLAlchemy-1.4.42-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4948b6c5f4e56693bbeff52f574279e4ff972ea3353f45967a14c30fb7ae2beb"}, - {file = "SQLAlchemy-1.4.42-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1811a0b19a08af7750c0b69e38dec3d46e47c4ec1d74b6184d69f12e1c99a5e0"}, - {file = "SQLAlchemy-1.4.42-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b01d9cd2f9096f688c71a3d0f33f3cd0af8549014e66a7a7dee6fc214a7277d"}, - {file = "SQLAlchemy-1.4.42-cp37-cp37m-win32.whl", hash = "sha256:bd448b262544b47a2766c34c0364de830f7fb0772d9959c1c42ad61d91ab6565"}, - {file = "SQLAlchemy-1.4.42-cp37-cp37m-win_amd64.whl", hash = "sha256:04f2598c70ea4a29b12d429a80fad3a5202d56dce19dd4916cc46a965a5ca2e9"}, - {file = "SQLAlchemy-1.4.42-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:3ab7c158f98de6cb4f1faab2d12973b330c2878d0c6b689a8ca424c02d66e1b3"}, - {file = "SQLAlchemy-1.4.42-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ee377eb5c878f7cefd633ab23c09e99d97c449dd999df639600f49b74725b80"}, - {file = 
"SQLAlchemy-1.4.42-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:934472bb7d8666727746a75670a1f8d91a9cae8c464bba79da30a0f6faccd9e1"}, - {file = "SQLAlchemy-1.4.42-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb94a3d1ba77ff2ef11912192c066f01e68416f554c194d769391638c8ad09a"}, - {file = "SQLAlchemy-1.4.42-cp38-cp38-win32.whl", hash = "sha256:f0f574465b78f29f533976c06b913e54ab4980b9931b69aa9d306afff13a9471"}, - {file = "SQLAlchemy-1.4.42-cp38-cp38-win_amd64.whl", hash = "sha256:a85723c00a636eed863adb11f1e8aaa36ad1c10089537823b4540948a8429798"}, - {file = "SQLAlchemy-1.4.42-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:5ce6929417d5dce5ad1d3f147db81735a4a0573b8fb36e3f95500a06eaddd93e"}, - {file = "SQLAlchemy-1.4.42-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723e3b9374c1ce1b53564c863d1a6b2f1dc4e97b1c178d9b643b191d8b1be738"}, - {file = "SQLAlchemy-1.4.42-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:876eb185911c8b95342b50a8c4435e1c625944b698a5b4a978ad2ffe74502908"}, - {file = "SQLAlchemy-1.4.42-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fd49af453e590884d9cdad3586415922a8e9bb669d874ee1dc55d2bc425aacd"}, - {file = "SQLAlchemy-1.4.42-cp39-cp39-win32.whl", hash = "sha256:e4ef8cb3c5b326f839bfeb6af5f406ba02ad69a78c7aac0fbeeba994ad9bb48a"}, - {file = "SQLAlchemy-1.4.42-cp39-cp39-win_amd64.whl", hash = "sha256:5f966b64c852592469a7eb759615bbd351571340b8b344f1d3fa2478b5a4c934"}, - {file = "SQLAlchemy-1.4.42.tar.gz", hash = "sha256:177e41914c476ed1e1b77fd05966ea88c094053e17a85303c4ce007f88eff363"}, -] -sqlalchemy-stubs = [] stevedore = [ - {file = "stevedore-4.1.0-py3-none-any.whl", hash = "sha256:3b1cbd592a87315f000d05164941ee5e164899f8fc0ce9a00bb0f321f40ef93e"}, - {file = "stevedore-4.1.0.tar.gz", hash = "sha256:02518a8f0d6d29be8a445b7f2ac63753ff29e8f2a2faa01777568d5500d777a6"}, -] -swagger-ui-bundle = [ - {file = "swagger_ui_bundle-0.0.9-py3-none-any.whl", hash = "sha256:cea116ed81147c345001027325c1ddc9ca78c1ee7319935c3c75d3669279d575"}, - {file = "swagger_ui_bundle-0.0.9.tar.gz", hash = "sha256:b462aa1460261796ab78fd4663961a7f6f347ce01760f1303bbbdf630f11f516"}, -] -tokenize-rt = [ - {file = "tokenize_rt-5.0.0-py2.py3-none-any.whl", hash = "sha256:c67772c662c6b3dc65edf66808577968fb10badfc2042e3027196bed4daf9e5a"}, - {file = "tokenize_rt-5.0.0.tar.gz", hash = "sha256:3160bc0c3e8491312d0485171dea861fc160a240f5f5766b72a1165408d10740"}, -] -toml = [ - {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, -] -tomli = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] -tornado = [ - {file = "tornado-6.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:20f638fd8cc85f3cbae3c732326e96addff0a15e22d80f049e00121651e82e72"}, - {file = "tornado-6.2-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:87dcafae3e884462f90c90ecc200defe5e580a7fbbb4365eda7c7c1eb809ebc9"}, - {file = 
"tornado-6.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba09ef14ca9893954244fd872798b4ccb2367c165946ce2dd7376aebdde8e3ac"}, - {file = "tornado-6.2-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8150f721c101abdef99073bf66d3903e292d851bee51910839831caba341a75"}, - {file = "tornado-6.2-cp37-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3a2f5999215a3a06a4fc218026cd84c61b8b2b40ac5296a6db1f1451ef04c1e"}, - {file = "tornado-6.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5f8c52d219d4995388119af7ccaa0bcec289535747620116a58d830e7c25d8a8"}, - {file = "tornado-6.2-cp37-abi3-musllinux_1_1_i686.whl", hash = "sha256:6fdfabffd8dfcb6cf887428849d30cf19a3ea34c2c248461e1f7d718ad30b66b"}, - {file = "tornado-6.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:1d54d13ab8414ed44de07efecb97d4ef7c39f7438cf5e976ccd356bebb1b5fca"}, - {file = "tornado-6.2-cp37-abi3-win32.whl", hash = "sha256:5c87076709343557ef8032934ce5f637dbb552efa7b21d08e89ae7619ed0eb23"}, - {file = "tornado-6.2-cp37-abi3-win_amd64.whl", hash = "sha256:e5f923aa6a47e133d1cf87d60700889d7eae68988704e20c75fb2d65677a8e4b"}, - {file = "tornado-6.2.tar.gz", hash = "sha256:9b630419bde84ec666bfd7ea0a4cb2a8a651c2d5cccdbdd1972a0c859dfc3c13"}, -] -typeguard = [ - {file = "typeguard-2.13.3-py3-none-any.whl", hash = "sha256:5e3e3be01e887e7eafae5af63d1f36c849aaa94e3a0112097312aabfa16284f1"}, - {file = "typeguard-2.13.3.tar.gz", hash = "sha256:00edaa8da3a133674796cf5ea87d9f4b4c367d77476e185e80251cc13dfbb8c4"}, -] -types-click = [ - {file = "types-click-7.1.8.tar.gz", hash = "sha256:b6604968be6401dc516311ca50708a0a28baa7a0cb840efd7412f0dbbff4e092"}, - {file = "types_click-7.1.8-py3-none-any.whl", hash = "sha256:8cb030a669e2e927461be9827375f83c16b8178c365852c060a34e24871e7e81"}, -] -types-flask = [ - {file = "types-Flask-1.1.6.tar.gz", hash = "sha256:aac777b3abfff9436e6b01f6d08171cf23ea6e5be71cbf773aaabb1c5763e9cf"}, - {file = "types_Flask-1.1.6-py3-none-any.whl", hash = "sha256:6ab8a9a5e258b76539d652f6341408867298550b19b81f0e41e916825fc39087"}, -] -types-jinja2 = [ - {file = "types-Jinja2-2.11.9.tar.gz", hash = "sha256:dbdc74a40aba7aed520b7e4d89e8f0fe4286518494208b35123bcf084d4b8c81"}, - {file = "types_Jinja2-2.11.9-py3-none-any.whl", hash = "sha256:60a1e21e8296979db32f9374d8a239af4cb541ff66447bb915d8ad398f9c63b2"}, -] -types-markupsafe = [ - {file = "types-MarkupSafe-1.1.10.tar.gz", hash = "sha256:85b3a872683d02aea3a5ac2a8ef590193c344092032f58457287fbf8e06711b1"}, - {file = "types_MarkupSafe-1.1.10-py3-none-any.whl", hash = "sha256:ca2bee0f4faafc45250602567ef38d533e877d2ddca13003b319c551ff5b3cc5"}, -] -types-pytz = [ - {file = "types-pytz-2022.5.0.0.tar.gz", hash = "sha256:0c163b15d3e598e6cc7074a99ca9ec72b25dc1b446acc133b827667af0b7b09a"}, - {file = "types_pytz-2022.5.0.0-py3-none-any.whl", hash = "sha256:a8e1fe6a1b270fbfaf2553b20ad0f1316707cc320e596da903bb17d7373fed2d"}, -] -types-pyyaml = [ - {file = "types-PyYAML-6.0.12.1.tar.gz", hash = "sha256:70ccaafcf3fb404d57bffc1529fdd86a13e8b4f2cf9fc3ee81a6408ce0ad59d2"}, - {file = "types_PyYAML-6.0.12.1-py3-none-any.whl", hash = "sha256:aaf5e51444c13bd34104695a89ad9c48412599a4f615d65a60e649109714f608"}, -] -types-requests = [ - {file = "types-requests-2.28.11.2.tar.gz", hash = "sha256:fdcd7bd148139fb8eef72cf4a41ac7273872cad9e6ada14b11ff5dfdeee60ed3"}, - {file = "types_requests-2.28.11.2-py3-none-any.whl", hash = 
"sha256:14941f8023a80b16441b3b46caffcbfce5265fd14555844d6029697824b5a2ef"}, -] -types-urllib3 = [ - {file = "types-urllib3-1.26.25.1.tar.gz", hash = "sha256:a948584944b2412c9a74b9cf64f6c48caf8652cb88b38361316f6d15d8a184cd"}, - {file = "types_urllib3-1.26.25.1-py3-none-any.whl", hash = "sha256:f6422596cc9ee5fdf68f9d547f541096a20c2dcfd587e37c804c9ea720bf5cb2"}, -] -types-werkzeug = [ - {file = "types-Werkzeug-1.0.9.tar.gz", hash = "sha256:5cc269604c400133d452a40cee6397655f878fc460e03fde291b9e3a5eaa518c"}, - {file = "types_Werkzeug-1.0.9-py3-none-any.whl", hash = "sha256:194bd5715a13c598f05c63e8a739328657590943bce941e8a3619a6b5d4a54ec"}, -] -typing-extensions = [ - {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"}, - {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"}, -] -tzdata = [ - {file = "tzdata-2022.5-py2.py3-none-any.whl", hash = "sha256:323161b22b7802fdc78f20ca5f6073639c64f1a7227c40cd3e19fd1d0ce6650a"}, - {file = "tzdata-2022.5.tar.gz", hash = "sha256:e15b2b3005e2546108af42a0eb4ccab4d9e225e2dfbf4f77aad50c70a4b1f3ab"}, -] -tzlocal = [ - {file = "tzlocal-4.2-py3-none-any.whl", hash = "sha256:89885494684c929d9191c57aa27502afc87a579be5cdd3225c77c463ea043745"}, - {file = "tzlocal-4.2.tar.gz", hash = "sha256:ee5842fa3a795f023514ac2d801c4a81d1743bbe642e3940143326b3a00addd7"}, -] -unidecode = [ - {file = "Unidecode-1.3.6-py3-none-any.whl", hash = "sha256:547d7c479e4f377b430dd91ac1275d593308dce0fc464fb2ab7d41f82ec653be"}, - {file = "Unidecode-1.3.6.tar.gz", hash = "sha256:fed09cf0be8cf415b391642c2a5addfc72194407caee4f98719e40ec2a72b830"}, -] -urllib3 = [ - {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"}, - {file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"}, -] -vine = [ - {file = "vine-5.0.0-py2.py3-none-any.whl", hash = "sha256:4c9dceab6f76ed92105027c49c823800dd33cacce13bdedc5b914e3514b7fb30"}, - {file = "vine-5.0.0.tar.gz", hash = "sha256:7d3b1624a953da82ef63462013bbd271d3eb75751489f9807598e8f340bd637e"}, + {file = "stevedore-5.0.0-py3-none-any.whl", hash = "sha256:bd5a71ff5e5e5f5ea983880e4a1dd1bb47f8feebbb3d95b592398e2f02194771"}, + {file = "stevedore-5.0.0.tar.gz", hash = "sha256:2c428d2338976279e8eb2196f7a94910960d9f7ba2f41f3988511e95ca447021"}, ] virtualenv = [ - {file = "virtualenv-20.16.6-py3-none-any.whl", hash = "sha256:186ca84254abcbde98180fd17092f9628c5fe742273c02724972a1d8a2035108"}, - {file = "virtualenv-20.16.6.tar.gz", hash = "sha256:530b850b523c6449406dfba859d6345e48ef19b8439606c5d74d7d3c9e14d76e"}, -] -wcwidth = [ - {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, - {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, -] -werkzeug = [ - {file = "Werkzeug-2.2.2-py3-none-any.whl", hash = "sha256:f979ab81f58d7318e064e99c4506445d60135ac5cd2e177a2de0089bfd4c9bd5"}, - {file = "Werkzeug-2.2.2.tar.gz", hash = "sha256:7ea2d48322cc7c0f8b3a215ed73eabd7b5d75d0b50e31ab006286ccff9e00b8f"}, -] -wrapt = [ - {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = 
"sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1"}, - {file = "wrapt-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320"}, - {file = "wrapt-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2"}, - {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4"}, - {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069"}, - {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c"}, - {file = "wrapt-1.14.1-cp310-cp310-win32.whl", hash = "sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8"}, - {file = "wrapt-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d"}, - {file = "wrapt-1.14.1-cp35-cp35m-win32.whl", hash = "sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7"}, - {file = "wrapt-1.14.1-cp35-cp35m-win_amd64.whl", hash = "sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00"}, - {file = "wrapt-1.14.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff"}, - {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d"}, - {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1"}, - {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569"}, - {file = "wrapt-1.14.1-cp36-cp36m-win32.whl", hash = "sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed"}, - {file = "wrapt-1.14.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471"}, - {file = "wrapt-1.14.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248"}, - {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68"}, - {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d"}, - {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a"}, - {file = "wrapt-1.14.1-cp37-cp37m-win32.whl", hash = "sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853"}, - {file = "wrapt-1.14.1-cp37-cp37m-win_amd64.whl", hash = "sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c"}, - {file = "wrapt-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456"}, - {file = "wrapt-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57"}, - {file = "wrapt-1.14.1-cp38-cp38-win32.whl", hash = "sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5"}, - {file = "wrapt-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d"}, - {file = "wrapt-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383"}, - {file = "wrapt-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7"}, - {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86"}, - {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735"}, - {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe"}, - {file = "wrapt-1.14.1-cp39-cp39-win32.whl", hash = "sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5"}, - {file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"}, - {file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"}, -] -wtforms = [ - {file = "WTForms-3.0.1-py3-none-any.whl", hash = "sha256:837f2f0e0ca79481b92884962b914eba4e72b7a2daaf1f939c890ed0124b834b"}, - {file = "WTForms-3.0.1.tar.gz", hash = "sha256:6b351bbb12dd58af57ffef05bc78425d08d1914e0fd68ee14143b7ade023c5bc"}, -] -xdoctest = [ - {file = "xdoctest-1.1.0-py3-none-any.whl", hash = "sha256:da330c4dacee51f3c785820bc743188fb6f7c64c5fa1c54bff8836b3cf23d69b"}, - {file = "xdoctest-1.1.0.tar.gz", hash = "sha256:0fd4fad7932f0a2f082dfdfb857dd6ca41603757586c39b1e5b4d333fc389f8a"}, + {file = "virtualenv-20.21.0-py3-none-any.whl", hash = "sha256:31712f8f2a17bd06234fa97fdf19609e789dd4e3e4bf108c3da71d710651adbc"}, + {file = "virtualenv-20.21.0.tar.gz", hash = "sha256:f50e3e60f990a0757c9b68333c9fdaa72d7188caa417f96af9e52407831a3b68"}, ] diff --git a/pyproject.toml b/pyproject.toml index 5448c54c..5d6b64ca 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,71 +13,8 @@ classifiers = [ [tool.poetry.dependencies] python = ">=3.11,<3.12" -click = "^8.0.1" -flask = "2.2.2" -flask-admin = "*" -flask-bcrypt = "*" -flask-cors = "*" -flask-mail = "*" -flask-marshmallow = "*" 
-flask-migrate = "*" -flask-restful = "*" -werkzeug = "*" -# go back to main once https://github.com/sartography/SpiffWorkflow/pull/241 is merged -SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"} -# SpiffWorkflow = {develop = true, path = "/Users/kevin/projects/github/sartography/SpiffWorkflow"} -# SpiffWorkflow = {develop = true, path = "/home/jason/projects/github/sartography/SpiffWorkflow"} -sentry-sdk = "^1.10" -sphinx-autoapi = "^2.0" -# flask-bpmn = {develop = true, path = "/home/jason/projects/github/sartography/flask-bpmn"} -# flask-bpmn = {develop = true, path = "/Users/kevin/projects/github/sartography/flask-bpmn"} -flask-bpmn = {git = "https://github.com/sartography/flask-bpmn", rev = "main"} -mysql-connector-python = "^8.0.29" -pytest-flask = "^1.2.0" -pytest-flask-sqlalchemy = "^1.1.0" -psycopg2 = "^2.9.3" -typing-extensions = "^4.4.0" -connexion = {extras = [ "swagger-ui",], version = "^2"} -lxml = "^4.9.1" -marshmallow-enum = "^1.5.1" -marshmallow-sqlalchemy = "^0.28.0" -PyJWT = "^2.6.0" -gunicorn = "^20.1.0" -python-keycloak = "^2.5.0" -APScheduler = "^3.9.1" -Jinja2 = "^3.1.2" -RestrictedPython = "^6.0" -Flask-SQLAlchemy = "^3" - -# type hinting stuff -# these need to be in the normal (non dev-dependencies) section -# because if not then poetry export won't have them and nox -s mypy --pythons 3.10 -# will fail -types-Werkzeug = "^1.0.9" -types-PyYAML = "^6.0.12" -types-Flask = "^1.1.6" -types-requests = "^2.28.6" -types-pytz = "^2022.1.1" - -# https://github.com/dropbox/sqlalchemy-stubs/pull/251 -# someday get off github -# sqlalchemy-stubs = "^0.4" -# sqlalchemy-stubs = { git = "https://github.com/dropbox/sqlalchemy-stubs.git", rev = "master" } -# sqlalchemy-stubs = {develop = true, path = "/Users/kevin/projects/github/sqlalchemy-stubs"} -# for now use my fork -sqlalchemy-stubs = { git = "https://github.com/burnettk/sqlalchemy-stubs.git", rev = "scoped-session-delete" } -simplejson = "^3.17.6" - [tool.poetry.dev-dependencies] -pytest = "^7.1.2" -coverage = {extras = ["toml"], version = "^6.1"} -safety = "^2.3.1" -mypy = ">=0.961" -typeguard = "^2.13.2" -xdoctest = {extras = ["colors"], version = "^1.0.1"} -sphinx = "^5.0.2" -sphinx-autobuild = ">=2021.3.14" pre-commit = "^2.20.0" flake8 = "^4.0.1" black = ">=21.10b0" @@ -89,71 +26,3 @@ bandit = "1.7.2" flake8-bugbear = "^22.10.25" flake8-docstrings = "^1.6.0" flake8-rst-docstrings = "^0.2.7" -# flask-sqlalchemy-stubs = "^0.2" -pep8-naming = "^0.13.2" -darglint = "^1.8.1" -reorder-python-imports = "^3.9.0" -pre-commit-hooks = "^4.0.1" -sphinx-click = "^4.3.0" -Pygments = "^2.10.0" -pyupgrade = "^3.1.0" -furo = ">=2021.11.12" - -[tool.poetry.scripts] -spiffworkflow-backend = "spiffworkflow_backend.__main__:main" - -[tool.poetry.group.dev.dependencies] -tomli = "^2.0.1" - -[tool.pytest.ini_options] -# ignore deprecation warnings from various packages that we don't control -filterwarnings = [ - # note the use of single quote below to denote "raw" strings in TOML - # kombu/utils/compat.py:82 - 'ignore:SelectableGroups dict interface is deprecated. Use select.', - # flask_marshmallow/__init__.py:34 - # marshmallow_sqlalchemy/convert.py:17 - 'ignore:distutils Version classes are deprecated. 
Use packaging.version instead.', - # connexion/spec.py:50 - 'ignore:Passing a schema to Validator.iter_errors is deprecated and will be removed in a future release', - # connexion/decorators/validation.py:16 - 'ignore:Accessing jsonschema.draft4_format_checker is deprecated and will be removed in a future release.', - # connexion/apis/flask_api.py:236 - "ignore:'_request_ctx_stack' is deprecated and will be removed in Flask 2.3", - "ignore:Setting 'json_encoder' on the app or a blueprint is deprecated and will be removed in Flask 2.3", - "ignore:'JSONEncoder' is deprecated and will be removed in Flask 2.3", - "ignore:'app.json_encoder' is deprecated and will be removed in Flask 2.3" -] - -[tool.coverage.paths] -source = ["src", "*/site-packages"] -tests = ["tests", "*/tests"] - -[tool.coverage.run] -branch = true -source = ["spiffworkflow_backend", "tests"] - -[tool.coverage.report] -show_missing = true -fail_under = 50 - -[tool.mypy] -strict = true -disallow_any_generics = false -warn_unreachable = true -pretty = true -show_column_numbers = true -show_error_codes = true -show_error_context = true -plugins = "sqlmypy" - -# We get 'error: Module has no attribute "set_context"' for sentry-sdk without this option -implicit_reexport = true - -# allow for subdirs to NOT require __init__.py -namespace_packages = true -explicit_package_bases = false - -[build-system] -requires = ["poetry-core>=1.0.0"] -build-backend = "poetry.core.masonry.api" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index aa6b3cc8..e27f68a5 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -632,9 +632,10 @@ def process_instance_task_list( status_code=400, ) - _parent_bpmn_processes, task_models_of_parent_bpmn_processes = ( - TaskService.task_models_of_parent_bpmn_processes(to_task_model) - ) + ( + _parent_bpmn_processes, + task_models_of_parent_bpmn_processes, + ) = TaskService.task_models_of_parent_bpmn_processes(to_task_model) task_models_of_parent_bpmn_processes_guids = [p.guid for p in task_models_of_parent_bpmn_processes if p.guid] task_model_query = task_model_query.filter( or_( diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 6d236ef6..23d7c3c8 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -1220,13 +1220,16 @@ class ProcessInstanceProcessor: spiff_tasks_updated[task.id] = task for updated_spiff_task in spiff_tasks_updated.values(): - bpmn_process, task_model, new_task_models, new_json_data_dicts = ( - TaskService.find_or_create_task_model_from_spiff_task( - updated_spiff_task, - self.process_instance_model, - self._serializer, - bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, - ) + ( + bpmn_process, + task_model, + new_task_models, + new_json_data_dicts, + ) = TaskService.find_or_create_task_model_from_spiff_task( + updated_spiff_task, + self.process_instance_model, + self._serializer, + bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, ) 
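            # Judging by how the values are used just below, the helper returns the
            # newly created bpmn process (or None when it already exists, hence the
            # `bpmn_process or task_model.bpmn_process` fallback), the task model for
            # this spiff task, and any additional task models and JSON data dicts
            # built along the way. The parenthesized multi-target unpack is the usual
            # idiom for binding a multi-value return once the call no longer fits on
            # one line; a minimal sketch of the same pattern, with hypothetical names:
            #
            #     (
            #         first_value,
            #         second_value,
            #         third_value,
            #         fourth_value,
            #     ) = build_everything(some_input)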
bpmn_process_to_use = bpmn_process or task_model.bpmn_process bpmn_process_json_data = TaskService.update_task_data_on_bpmn_process( diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py index a9ac757c..2c8ca965 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py @@ -122,13 +122,16 @@ class TaskModelSavingDelegate(EngineStepDelegate): self.json_data_dicts[json_data_dict["hash"]] = json_data_dict def _update_task_model_with_spiff_task(self, spiff_task: SpiffTask, task_failed: bool = False) -> TaskModel: - bpmn_process, task_model, new_task_models, new_json_data_dicts = ( - TaskService.find_or_create_task_model_from_spiff_task( - spiff_task, - self.process_instance, - self.serializer, - bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, - ) + ( + bpmn_process, + task_model, + new_task_models, + new_json_data_dicts, + ) = TaskService.find_or_create_task_model_from_spiff_task( + spiff_task, + self.process_instance, + self.serializer, + bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, ) bpmn_process_json_data = TaskService.update_task_data_on_bpmn_process( bpmn_process or task_model.bpmn_process, spiff_task.workflow.data From 6c13352c70fe3903d5695793cf1b0c68d02be51f Mon Sep 17 00:00:00 2001 From: jasquat Date: Tue, 28 Mar 2023 16:19:31 -0400 Subject: [PATCH 125/162] added back in missing required libs w/ burnettk --- poetry.lock | 99 +++++++++++++++++++++++++++++++++++++++++++++++++- pyproject.toml | 6 +++ 2 files changed, 104 insertions(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index c955b56f..a63cec16 100644 --- a/poetry.lock +++ b/poetry.lock @@ -61,6 +61,14 @@ category = "dev" optional = false python-versions = ">=3.6.1" +[[package]] +name = "classify-imports" +version = "4.2.0" +description = "Utilities for refactoring imports in python-like syntax." +category = "dev" +optional = false +python-versions = ">=3.7" + [[package]] name = "click" version = "8.1.3" @@ -297,6 +305,17 @@ nodeenv = ">=0.11.1" pyyaml = ">=5.1" virtualenv = ">=20.10.0" +[[package]] +name = "pre-commit-hooks" +version = "4.4.0" +description = "Some out-of-the-box hooks for pre-commit." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +"ruamel.yaml" = ">=0.15" + [[package]] name = "pycodestyle" version = "2.8.0" @@ -338,6 +357,17 @@ python-versions = ">=3.6" [package.extras] plugins = ["importlib-metadata"] +[[package]] +name = "pyupgrade" +version = "3.3.1" +description = "A tool to automatically upgrade syntax for newer versions." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +tokenize-rt = ">=3.2.0" + [[package]] name = "pyyaml" version = "6.0" @@ -346,6 +376,17 @@ category = "dev" optional = false python-versions = ">=3.6" +[[package]] +name = "reorder-python-imports" +version = "3.9.0" +description = "Tool for reordering python imports" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +classify-imports = ">=4.1" + [[package]] name = "restructuredtext-lint" version = "1.4.0" @@ -357,6 +398,18 @@ python-versions = "*" [package.dependencies] docutils = ">=0.11,<1.0" +[[package]] +name = "ruamel-yaml" +version = "0.17.21" +description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" +category = "dev" +optional = false +python-versions = ">=3" + +[package.extras] +docs = ["ryd"] +jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] + [[package]] name = "setuptools" version = "67.6.1" @@ -397,6 +450,22 @@ python-versions = ">=3.8" [package.dependencies] pbr = ">=2.0.0,<2.1.0 || >2.1.0" +[[package]] +name = "tokenize-rt" +version = "5.0.0" +description = "A wrapper around the stdlib `tokenize` which roundtrips." +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +category = "dev" +optional = false +python-versions = ">=3.7" + [[package]] name = "virtualenv" version = "20.21.0" @@ -417,7 +486,7 @@ test = ["covdefaults (>=2.2.2)", "coverage (>=7.1)", "coverage-enable-subprocess [metadata] lock-version = "1.1" python-versions = ">=3.11,<3.12" -content-hash = "b8148152aab04a6f436f9f815c998963dd371aa299736ec0ffb7aff89324bffc" +content-hash = "b47d05a3bedc167232bba9ab07c2c770574018e949d7eb87c65a95a2df84d76b" [metadata.files] attrs = [ @@ -459,6 +528,10 @@ cfgv = [ {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"}, {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, ] +classify-imports = [ + {file = "classify_imports-4.2.0-py2.py3-none-any.whl", hash = "sha256:dbbc264b70a470ed8c6c95976a11dfb8b7f63df44ed1af87328bbed2663f5161"}, + {file = "classify_imports-4.2.0.tar.gz", hash = "sha256:7abfb7ea92149b29d046bd34573d247ba6e68cc28100c801eba4af17964fc40e"}, +] click = [ {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, @@ -546,6 +619,10 @@ pre-commit = [ {file = "pre_commit-2.21.0-py2.py3-none-any.whl", hash = "sha256:e2f91727039fc39a92f58a588a25b87f936de6567eed4f0e673e0507edc75bad"}, {file = "pre_commit-2.21.0.tar.gz", hash = "sha256:31ef31af7e474a8d8995027fefdfcf509b5c913ff31f2015b4ec4beb26a6f658"}, ] +pre-commit-hooks = [ + {file = "pre_commit_hooks-4.4.0-py2.py3-none-any.whl", hash = "sha256:fc8837335476221ccccda3d176ed6ae29fe58753ce7e8b7863f5d0f987328fc6"}, + {file = "pre_commit_hooks-4.4.0.tar.gz", hash = "sha256:7011eed8e1a25cde94693da009cba76392194cecc2f3f06c51a44ea6ad6c2af9"}, +] pycodestyle = [ {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, @@ -562,6 +639,10 @@ 
pygments = [ {file = "Pygments-2.14.0-py3-none-any.whl", hash = "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717"}, {file = "Pygments-2.14.0.tar.gz", hash = "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297"}, ] +pyupgrade = [ + {file = "pyupgrade-3.3.1-py2.py3-none-any.whl", hash = "sha256:3b93641963df022d605c78aeae4b5956a5296ea24701eafaef9c487527b77e60"}, + {file = "pyupgrade-3.3.1.tar.gz", hash = "sha256:f88bce38b0ba92c2a9a5063c8629e456e8d919b67d2d42c7ecab82ff196f9813"}, +] pyyaml = [ {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, @@ -604,9 +685,17 @@ pyyaml = [ {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, ] +reorder-python-imports = [ + {file = "reorder_python_imports-3.9.0-py2.py3-none-any.whl", hash = "sha256:3f9c16e8781f54c944756d0d1eb34a8c863554f7a4eb3693f574fe19b1a29b56"}, + {file = "reorder_python_imports-3.9.0.tar.gz", hash = "sha256:49292ed537829a6bece9fb3746fc1bbe98f52643be5de01a4e13680268a5b0ec"}, +] restructuredtext-lint = [ {file = "restructuredtext_lint-1.4.0.tar.gz", hash = "sha256:1b235c0c922341ab6c530390892eb9e92f90b9b75046063e047cacfb0f050c45"}, ] +ruamel-yaml = [ + {file = "ruamel.yaml-0.17.21-py3-none-any.whl", hash = "sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7"}, + {file = "ruamel.yaml-0.17.21.tar.gz", hash = "sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af"}, +] setuptools = [ {file = "setuptools-67.6.1-py3-none-any.whl", hash = "sha256:e728ca814a823bf7bf60162daf9db95b93d532948c4c0bea762ce62f60189078"}, {file = "setuptools-67.6.1.tar.gz", hash = "sha256:257de92a9d50a60b8e22abfcbb771571fde0dbf3ec234463212027a4eeecbe9a"}, @@ -623,6 +712,14 @@ stevedore = [ {file = "stevedore-5.0.0-py3-none-any.whl", hash = "sha256:bd5a71ff5e5e5f5ea983880e4a1dd1bb47f8feebbb3d95b592398e2f02194771"}, {file = "stevedore-5.0.0.tar.gz", hash = "sha256:2c428d2338976279e8eb2196f7a94910960d9f7ba2f41f3988511e95ca447021"}, ] +tokenize-rt = [ + {file = "tokenize_rt-5.0.0-py2.py3-none-any.whl", hash = "sha256:c67772c662c6b3dc65edf66808577968fb10badfc2042e3027196bed4daf9e5a"}, + {file = "tokenize_rt-5.0.0.tar.gz", hash = "sha256:3160bc0c3e8491312d0485171dea861fc160a240f5f5766b72a1165408d10740"}, +] +tomli = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] virtualenv = [ {file = "virtualenv-20.21.0-py3-none-any.whl", hash = "sha256:31712f8f2a17bd06234fa97fdf19609e789dd4e3e4bf108c3da71d710651adbc"}, {file = "virtualenv-20.21.0.tar.gz", hash = "sha256:f50e3e60f990a0757c9b68333c9fdaa72d7188caa417f96af9e52407831a3b68"}, diff --git a/pyproject.toml b/pyproject.toml index 5d6b64ca..86d868db 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,3 +26,9 @@ bandit = "1.7.2" flake8-bugbear = "^22.10.25" flake8-docstrings = "^1.6.0" flake8-rst-docstrings = "^0.2.7" +reorder-python-imports = "^3.9.0" +pre-commit-hooks = "^4.0.1" +pyupgrade = "^3.1.0" + +[tool.poetry.group.dev.dependencies] +tomli = 
"^2.0.1" From 20bdc8dd0fbf8501942e74a36c7ac12b2bc5d3c8 Mon Sep 17 00:00:00 2001 From: jasquat Date: Tue, 28 Mar 2023 16:28:41 -0400 Subject: [PATCH 126/162] use session delete branch of sqlalchemy stubs w/ burnettk --- spiffworkflow-backend/poetry.lock | 16 ++++++++++------ spiffworkflow-backend/pyproject.toml | 4 +--- 2 files changed, 11 insertions(+), 9 deletions(-) diff --git a/spiffworkflow-backend/poetry.lock b/spiffworkflow-backend/poetry.lock index d814b480..9abf75bd 100644 --- a/spiffworkflow-backend/poetry.lock +++ b/spiffworkflow-backend/poetry.lock @@ -1911,15 +1911,22 @@ sqlcipher = ["sqlcipher3-binary"] [[package]] name = "sqlalchemy-stubs" version = "0.4" -description = "SQLAlchemy stubs and mypy plugin" +description = "" category = "main" optional = false python-versions = "*" +develop = false [package.dependencies] mypy = ">=0.790" typing-extensions = ">=3.7.4" +[package.source] +type = "git" +url = "https://github.com/burnettk/sqlalchemy-stubs.git" +reference = "scoped-session-delete" +resolved_reference = "d1176931684ce5b327539cc9567d4a1cd8ef1efd" + [[package]] name = "stevedore" version = "5.0.0" @@ -2247,7 +2254,7 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more [metadata] lock-version = "1.1" python-versions = ">=3.9,<3.12" -content-hash = "64eb6f0dce231c627af5b4a0a3940e199c67afd93b880c85d622336ab466be5c" +content-hash = "0071c778fc09995b458298c0212d3c707fff91b06b660ced4e0e3c420e384ffe" [metadata.files] alabaster = [ @@ -3641,10 +3648,7 @@ sqlalchemy = [ {file = "SQLAlchemy-2.0.7-py3-none-any.whl", hash = "sha256:fc67667c8e8c04e5c3250ab2cd51df40bc7c28c7c253d0475b377eff86fe4bb0"}, {file = "SQLAlchemy-2.0.7.tar.gz", hash = "sha256:a4c1e1582492c66dfacc9eab52738f3e64d9a2a380e412668f75aa06e540f649"}, ] -sqlalchemy-stubs = [ - {file = "sqlalchemy-stubs-0.4.tar.gz", hash = "sha256:c665d6dd4482ef642f01027fa06c3d5e91befabb219dc71fc2a09e7d7695f7ae"}, - {file = "sqlalchemy_stubs-0.4-py3-none-any.whl", hash = "sha256:5eec7aa110adf9b957b631799a72fef396b23ff99fe296df726645d01e312aa5"}, -] +sqlalchemy-stubs = [] stevedore = [ {file = "stevedore-5.0.0-py3-none-any.whl", hash = "sha256:bd5a71ff5e5e5f5ea983880e4a1dd1bb47f8feebbb3d95b592398e2f02194771"}, {file = "stevedore-5.0.0.tar.gz", hash = "sha256:2c428d2338976279e8eb2196f7a94910960d9f7ba2f41f3988511e95ca447021"}, diff --git a/spiffworkflow-backend/pyproject.toml b/spiffworkflow-backend/pyproject.toml index 30eb77a5..0450d045 100644 --- a/spiffworkflow-backend/pyproject.toml +++ b/spiffworkflow-backend/pyproject.toml @@ -72,7 +72,7 @@ types-pytz = "^2022.1.1" # sqlalchemy-stubs = { git = "https://github.com/dropbox/sqlalchemy-stubs.git", rev = "master" } # sqlalchemy-stubs = {develop = true, path = "/Users/kevin/projects/github/sqlalchemy-stubs"} # for now use my fork -# sqlalchemy-stubs = { git = "https://github.com/burnettk/sqlalchemy-stubs.git", rev = "scoped-session-delete" } +sqlalchemy-stubs = { git = "https://github.com/burnettk/sqlalchemy-stubs.git", rev = "scoped-session-delete" } simplejson = "^3.17.6" pytz = "^2022.6" dateparser = "^1.1.2" @@ -83,8 +83,6 @@ flask-simple-crypt = "^0.3.3" cryptography = "^39.0.2" safety = "^2.3.5" sqlalchemy = "^2.0.7" -sqlalchemy-stubs = "^0.4" - [tool.poetry.dev-dependencies] pytest = "^7.1.2" From ed7ccb4aad6bcbde73d455b3702c798b03c36eeb Mon Sep 17 00:00:00 2001 From: jasquat Date: Tue, 28 Mar 2023 16:42:48 -0400 Subject: [PATCH 127/162] run snyk in ci w/ burnettk --- .github/workflows/backend_tests.yml | 9 +++++++++ 1 file changed, 9 insertions(+) diff 
--git a/.github/workflows/backend_tests.yml b/.github/workflows/backend_tests.yml index 517f6297..b8ab1c20 100644 --- a/.github/workflows/backend_tests.yml +++ b/.github/workflows/backend_tests.yml @@ -173,6 +173,15 @@ jobs: name: logs-${{matrix.python}}-${{matrix.os}}-${{matrix.database}} path: "./log/*.log" + security: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@master + - name: Run Snyk to check for vulnerabilities + uses: snyk/actions/python@master + env: + SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }} + run_pre_commit_checks: runs-on: ubuntu-latest defaults: From cef069d02809b99911e6b4d69a84a294d8561dbc Mon Sep 17 00:00:00 2001 From: jasquat Date: Tue, 28 Mar 2023 16:55:13 -0400 Subject: [PATCH 128/162] run snyk on backend and added marshmallow dep to remove warning w/ burnettk --- .github/workflows/backend_tests.yml | 4 ++- spiffworkflow-backend/poetry.lock | 25 ++++++++++++++++++- spiffworkflow-backend/pyproject.toml | 1 + .../services/process_instance_processor.py | 1 + 4 files changed, 29 insertions(+), 2 deletions(-) diff --git a/.github/workflows/backend_tests.yml b/.github/workflows/backend_tests.yml index b8ab1c20..bec95340 100644 --- a/.github/workflows/backend_tests.yml +++ b/.github/workflows/backend_tests.yml @@ -173,12 +173,14 @@ jobs: name: logs-${{matrix.python}}-${{matrix.os}}-${{matrix.database}} path: "./log/*.log" - security: + snyk: runs-on: ubuntu-latest steps: - uses: actions/checkout@master - name: Run Snyk to check for vulnerabilities uses: snyk/actions/python@master + with: + args: spiffworkflow-backend env: SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }} diff --git a/spiffworkflow-backend/poetry.lock b/spiffworkflow-backend/poetry.lock index 9abf75bd..356529d2 100644 --- a/spiffworkflow-backend/poetry.lock +++ b/spiffworkflow-backend/poetry.lock @@ -1083,6 +1083,25 @@ python-versions = "*" [package.dependencies] marshmallow = ">=2.0.0" +[[package]] +name = "marshmallow-sqlalchemy" +version = "0.29.0" +description = "SQLAlchemy integration with the marshmallow (de)serialization library" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +marshmallow = ">=3.0.0" +packaging = ">=21.3" +SQLAlchemy = ">=1.4.40,<3.0" + +[package.extras] +dev = ["flake8 (==6.0.0)", "flake8-bugbear (==23.2.13)", "pre-commit (==3.1.0)", "pytest", "pytest-lazy-fixture (>=0.6.2)", "tox"] +docs = ["alabaster (==0.7.13)", "sphinx (==6.1.3)", "sphinx-issues (==3.0.1)"] +lint = ["flake8 (==6.0.0)", "flake8-bugbear (==23.2.13)", "pre-commit (==3.1.0)"] +tests = ["pytest", "pytest-lazy-fixture (>=0.6.2)"] + [[package]] name = "mccabe" version = "0.6.1" @@ -2254,7 +2273,7 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more [metadata] lock-version = "1.1" python-versions = ">=3.9,<3.12" -content-hash = "0071c778fc09995b458298c0212d3c707fff91b06b660ced4e0e3c420e384ffe" +content-hash = "9fea44386fbab29102a051a254058909568c4ee3dbd6a402fb91aacbcf1f7fd2" [metadata.files] alabaster = [ @@ -3016,6 +3035,10 @@ marshmallow-enum = [ {file = "marshmallow-enum-1.5.1.tar.gz", hash = "sha256:38e697e11f45a8e64b4a1e664000897c659b60aa57bfa18d44e226a9920b6e58"}, {file = "marshmallow_enum-1.5.1-py2.py3-none-any.whl", hash = "sha256:57161ab3dbfde4f57adeb12090f39592e992b9c86d206d02f6bd03ebec60f072"}, ] +marshmallow-sqlalchemy = [ + {file = "marshmallow-sqlalchemy-0.29.0.tar.gz", hash = "sha256:3523a774390ef0c1c0f7c708a7519809c5396cf608720f14f55c36f74ff5bbec"}, + {file = "marshmallow_sqlalchemy-0.29.0-py2.py3-none-any.whl", hash = 
"sha256:3cee0bf61ed10687c0a41448e1916649b28222334a02f7b937c39d1c69c18bee"}, +] mccabe = [ {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, diff --git a/spiffworkflow-backend/pyproject.toml b/spiffworkflow-backend/pyproject.toml index 0450d045..df2495e0 100644 --- a/spiffworkflow-backend/pyproject.toml +++ b/spiffworkflow-backend/pyproject.toml @@ -83,6 +83,7 @@ flask-simple-crypt = "^0.3.3" cryptography = "^39.0.2" safety = "^2.3.5" sqlalchemy = "^2.0.7" +marshmallow-sqlalchemy = "^0.29.0" [tool.poetry.dev-dependencies] pytest = "^7.1.2" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 61d0c9ea..01a27e9c 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -50,6 +50,7 @@ from SpiffWorkflow.spiff.serializer.config import SPIFF_SPEC_CONFIG # type: ign from SpiffWorkflow.task import Task as SpiffTask # type: ignore from SpiffWorkflow.task import TaskState from SpiffWorkflow.util.deep_merge import DeepMerge # type: ignore + from spiffworkflow_backend.exceptions.api_error import ApiError from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel from spiffworkflow_backend.models.bpmn_process_definition import ( From 6d8d9b8d57bf867e12365a9ab3d0ea7d507e4460 Mon Sep 17 00:00:00 2001 From: jasquat Date: Tue, 28 Mar 2023 17:14:58 -0400 Subject: [PATCH 129/162] ignore dccache files w/ burnettk --- .github/workflows/backend_tests.yml | 2 ++ .gitignore | 1 + 2 files changed, 3 insertions(+) diff --git a/.github/workflows/backend_tests.yml b/.github/workflows/backend_tests.yml index bec95340..f0c9eaf3 100644 --- a/.github/workflows/backend_tests.yml +++ b/.github/workflows/backend_tests.yml @@ -173,6 +173,8 @@ jobs: name: logs-${{matrix.python}}-${{matrix.os}}-${{matrix.database}} path: "./log/*.log" + # burnettk created an account at https://app.snyk.io/org/kevin-jfx + # and added his SNYK_TOKEN secret under the spiff-arena repo. 
snyk: runs-on: ubuntu-latest steps: diff --git a/.gitignore b/.gitignore index deaccb3a..d391cd85 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ pyrightconfig.json .idea/ t +.dccache From c03a72332fb59e445484e5cddbc492c356fc0b19 Mon Sep 17 00:00:00 2001 From: burnettk Date: Wed, 29 Mar 2023 13:47:03 -0400 Subject: [PATCH 130/162] exact match --- spiffworkflow-backend/keycloak/bin/start_keycloak | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/spiffworkflow-backend/keycloak/bin/start_keycloak b/spiffworkflow-backend/keycloak/bin/start_keycloak index 242a3375..53f6f17d 100755 --- a/spiffworkflow-backend/keycloak/bin/start_keycloak +++ b/spiffworkflow-backend/keycloak/bin/start_keycloak @@ -26,9 +26,10 @@ fi # https://stackoverflow.com/a/60579344/6090676 container_name="keycloak" -if [[ -n "$(docker ps -qa -f name=$container_name)" ]]; then +container_regex="^keycloak$" +if [[ -n "$(docker ps -qa -f name=$container_regex)" ]]; then echo ":: Found container - $container_name" - if [[ -n "$(docker ps -q -f name=$container_name)" ]]; then + if [[ -n "$(docker ps -q -f name=$container_regex)" ]]; then echo ":: Stopping running container - $container_name" docker stop $container_name fi From a90d568bdef56c725284de263cabec4cf7f7b176 Mon Sep 17 00:00:00 2001 From: burnettk Date: Wed, 29 Mar 2023 14:17:26 -0400 Subject: [PATCH 131/162] delete postgres container if it is there --- spiffworkflow-backend/bin/recreate_db | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/spiffworkflow-backend/bin/recreate_db b/spiffworkflow-backend/bin/recreate_db index 3d8d3db2..505f35e0 100755 --- a/spiffworkflow-backend/bin/recreate_db +++ b/spiffworkflow-backend/bin/recreate_db @@ -44,6 +44,17 @@ if [[ "${1:-}" == "clean" ]]; then # TODO: check to see if the db already exists and we can connect to it. also actually clean it up. # start postgres in background with one db if [[ "${SPIFFWORKFLOW_BACKEND_DATABASE_TYPE:-}" == "postgres" ]]; then + container_name = "postgres-spiff" + container_regex = "^postgres-spiff$" + if [[ -n "$(docker ps -qa -f name=$container_regex)" ]]; then + echo ":: Found postgres container - $container_name" + if [[ -n "$(docker ps -q -f name=$container_regex)" ]]; then + echo ":: Stopping running container - $container_name" + docker stop $container_name + fi + echo ":: Removing stopped container - $container_name" + docker rm $container_name + fi if ! docker exec -it postgres-spiff psql -U spiffworkflow_backend spiffworkflow_backend_unit_testing -c "select 1"; then docker run --name postgres-spiff -p 5432:5432 -e POSTGRES_PASSWORD=spiffworkflow_backend -e POSTGRES_USER=spiffworkflow_backend -e POSTGRES_DB=spiffworkflow_backend_unit_testing -d postgres sleep 4 # classy From e0d96ed837adc4224d7c8c0290b7793c2582431e Mon Sep 17 00:00:00 2001 From: burnettk Date: Wed, 29 Mar 2023 14:18:37 -0400 Subject: [PATCH 132/162] i am terrible --- spiffworkflow-backend/bin/recreate_db | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/spiffworkflow-backend/bin/recreate_db b/spiffworkflow-backend/bin/recreate_db index 505f35e0..14b23cf8 100755 --- a/spiffworkflow-backend/bin/recreate_db +++ b/spiffworkflow-backend/bin/recreate_db @@ -44,8 +44,8 @@ if [[ "${1:-}" == "clean" ]]; then # TODO: check to see if the db already exists and we can connect to it. also actually clean it up. 
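  # Cleanup note for the block added below: `docker ps -qa` matches stopped
  # containers as well as running ones, so this mirrors the keycloak script's
  # stop-then-remove pattern from the previous commit. Any leftover
  # postgres-spiff container is stopped if running and then removed, so the
  # later `docker run --name postgres-spiff` cannot fail on a name conflict.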
# start postgres in background with one db if [[ "${SPIFFWORKFLOW_BACKEND_DATABASE_TYPE:-}" == "postgres" ]]; then - container_name = "postgres-spiff" - container_regex = "^postgres-spiff$" + container_name="postgres-spiff" + container_regex="^postgres-spiff$" if [[ -n "$(docker ps -qa -f name=$container_regex)" ]]; then echo ":: Found postgres container - $container_name" if [[ -n "$(docker ps -q -f name=$container_regex)" ]]; then From 96a6304e82872de2deb07ecd452c8ce89dd273d9 Mon Sep 17 00:00:00 2001 From: burnettk Date: Wed, 29 Mar 2023 15:51:04 -0400 Subject: [PATCH 133/162] use text for sql --- .../src/spiffworkflow_backend/helpers/db_helper.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/helpers/db_helper.py b/spiffworkflow-backend/src/spiffworkflow_backend/helpers/db_helper.py index 57108b6c..9416e551 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/helpers/db_helper.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/helpers/db_helper.py @@ -9,7 +9,7 @@ from spiffworkflow_backend.models.db import db def try_to_connect(start_time: float) -> None: """Try to connect.""" try: - db.first_or_404("select 1") # type: ignore + db.first_or_404(text("select 1")) # type: ignore except sqlalchemy.exc.DatabaseError as exception: if time.time() - start_time > 15: raise exception From 925e784dca39c4d241e2de3a5134a625b254b90d Mon Sep 17 00:00:00 2001 From: jasquat Date: Wed, 29 Mar 2023 16:03:45 -0400 Subject: [PATCH 134/162] import missing lib w/ burnettk --- .../src/spiffworkflow_backend/helpers/db_helper.py | 1 + 1 file changed, 1 insertion(+) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/helpers/db_helper.py b/spiffworkflow-backend/src/spiffworkflow_backend/helpers/db_helper.py index 9416e551..091dfaff 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/helpers/db_helper.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/helpers/db_helper.py @@ -2,6 +2,7 @@ import time import sqlalchemy +from sqlalchemy.sql import text from spiffworkflow_backend.models.db import db From 2feaf681326e2eecb117bda64556467f6b7d40b2 Mon Sep 17 00:00:00 2001 From: jasquat Date: Wed, 29 Mar 2023 16:49:33 -0400 Subject: [PATCH 135/162] check if data file value is a string before returning it w/ burnettk --- .../spiffworkflow_backend/services/process_instance_service.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py index 711ea0d8..0da39886 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py @@ -279,7 +279,8 @@ class ProcessInstanceService: yield (identifier, list_value, list_index) if isinstance(list_value, dict) and len(list_value) == 1: for v in list_value.values(): - yield (identifier, v, list_index) + if isinstance(v, str): + yield (identifier, v, list_index) @classmethod def file_data_models_for_data( From 7d7e976b37454e9d240c5b2cd08579e448218ec5 Mon Sep 17 00:00:00 2001 From: jasquat Date: Thu, 30 Mar 2023 11:15:27 -0400 Subject: [PATCH 136/162] added an init method to task service and move a lot of code from workflow execution to it and fixed up the task running test to check things more thoroughly --- .../services/process_instance_processor.py | 11 +- 
.../services/task_service.py | 140 ++++++++++++++++++ .../services/workflow_execution_service.py | 105 ++----------- .../manual_task_with_subprocesses.bpmn | 42 +++--- .../unit/test_process_instance_processor.py | 111 ++++++++++---- 5 files changed, 268 insertions(+), 141 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 518506bc..1c32efb8 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -1,6 +1,6 @@ """Process_instance_processor.""" -import copy import _strptime # type: ignore +import copy import decimal import json import logging @@ -1373,7 +1373,7 @@ class ProcessInstanceProcessor: bpmn_process = to_task_model.bpmn_process properties_json = copy.copy(bpmn_process.properties_json) - properties_json['last_task'] = parent_task_model.guid + properties_json["last_task"] = parent_task_model.guid bpmn_process.properties_json = properties_json db.session.add(bpmn_process) db.session.commit() @@ -1818,6 +1818,13 @@ class ProcessInstanceProcessor: user_id=user.id, ) + task_service = TaskService( + process_instance=self.process_instance_model, + serializer=self._serializer, + bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, + ) + task_service.process_parents_and_children_and_save_to_database(spiff_task) + # this is the thing that actually commits the db transaction (on behalf of the other updates above as well) self.save() diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index a67a7755..671d415c 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -1,5 +1,6 @@ import copy import json +import time from hashlib import sha256 from typing import Optional from typing import Tuple @@ -20,6 +21,8 @@ from spiffworkflow_backend.models.bpmn_process import BpmnProcessNotFoundError from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.json_data import JsonDataModel # noqa: F401 from spiffworkflow_backend.models.process_instance import ProcessInstanceModel +from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventModel +from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType from spiffworkflow_backend.models.task import TaskModel # noqa: F401 @@ -31,6 +34,135 @@ class JsonDataDict(TypedDict): class TaskService: PYTHON_ENVIRONMENT_STATE_KEY = "spiff__python_env_state" + def __init__( + self, + process_instance: ProcessInstanceModel, + serializer: BpmnWorkflowSerializer, + bpmn_definition_to_task_definitions_mappings: dict, + ) -> None: + self.process_instance = process_instance + self.bpmn_definition_to_task_definitions_mappings = bpmn_definition_to_task_definitions_mappings + self.serializer = serializer + + self.bpmn_processes: dict[str, BpmnProcessModel] = {} + self.task_models: dict[str, TaskModel] = {} + self.json_data_dicts: dict[str, JsonDataDict] = {} + self.process_instance_events: dict[str, ProcessInstanceEventModel] = {} + + def save_objects_to_database(self) -> None: + db.session.bulk_save_objects(self.bpmn_processes.values()) + 
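        # bulk_save_objects persists each collection in bulk but does not cascade
        # relationships, so every batch is flushed explicitly. The ordering is
        # deliberate on the assumption that task rows reference their bpmn process:
        # processes first, then tasks, then events, with the JSON data rows
        # upserted separately by insert_or_update_json_data_records.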
db.session.bulk_save_objects(self.task_models.values()) + db.session.bulk_save_objects(self.process_instance_events.values()) + self.__class__.insert_or_update_json_data_records(self.json_data_dicts) + + def process_parents_and_children_and_save_to_database( + self, + spiff_task: SpiffTask, + ) -> None: + self.process_spiff_task_children(spiff_task) + self.process_spiff_task_parents(spiff_task) + self.save_objects_to_database() + + def process_spiff_task_children( + self, + spiff_task: SpiffTask, + ) -> None: + for child_spiff_task in spiff_task.children: + self.update_task_model_with_spiff_task( + spiff_task=child_spiff_task, + ) + self.process_spiff_task_children( + spiff_task=child_spiff_task, + ) + + def process_spiff_task_parents( + self, + spiff_task: SpiffTask, + ) -> None: + (parent_subprocess_guid, _parent_subprocess) = self.__class__.task_subprocess(spiff_task) + if parent_subprocess_guid is not None: + spiff_task_of_parent_subprocess = spiff_task.workflow._get_outermost_workflow().get_task_from_id( + UUID(parent_subprocess_guid) + ) + + if spiff_task_of_parent_subprocess is not None: + self.update_task_model_with_spiff_task( + spiff_task=spiff_task_of_parent_subprocess, + ) + self.process_spiff_task_parents( + spiff_task=spiff_task_of_parent_subprocess, + ) + + def update_task_model_with_spiff_task( + self, + spiff_task: SpiffTask, + task_failed: bool = False, + ) -> TaskModel: + ( + new_bpmn_process, + task_model, + new_task_models, + new_json_data_dicts, + ) = self.__class__.find_or_create_task_model_from_spiff_task( + spiff_task, + self.process_instance, + self.serializer, + bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, + ) + bpmn_process = new_bpmn_process or task_model.bpmn_process + bpmn_process_json_data = self.__class__.update_task_data_on_bpmn_process( + bpmn_process, spiff_task.workflow.data + ) + self.task_models.update(new_task_models) + self.json_data_dicts.update(new_json_data_dicts) + json_data_dict_list = self.__class__.update_task_model(task_model, spiff_task, self.serializer) + self.task_models[task_model.guid] = task_model + if bpmn_process_json_data is not None: + json_data_dict_list.append(bpmn_process_json_data) + self._update_json_data_dicts_using_list(json_data_dict_list, self.json_data_dicts) + + if task_model.state == "COMPLETED" or task_failed: + event_type = ProcessInstanceEventType.task_completed.value + if task_failed: + event_type = ProcessInstanceEventType.task_failed.value + + # FIXME: some failed tasks will currently not have either timestamp, since we only hook into spiff when tasks complete, + # whereas script tasks execute when READY.
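            # The chained `or` below is a straightforward fallback: prefer the recorded
            # end time, then the start time, and only synthesize time.time() when the
            # task never received either timestamp (the failed-task case noted above).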
+ timestamp = task_model.end_in_seconds or task_model.start_in_seconds or time.time() + process_instance_event = ProcessInstanceEventModel( + task_guid=task_model.guid, + process_instance_id=self.process_instance.id, + event_type=event_type, + timestamp=timestamp, + ) + self.process_instance_events[task_model.guid] = process_instance_event + + # self.update_bpmn_process(spiff_task.workflow, bpmn_process) + return task_model + + def update_bpmn_process( + self, + spiff_workflow: BpmnWorkflow, + bpmn_process: BpmnProcessModel, + ) -> None: + # import pdb; pdb.set_trace() + new_properties_json = copy.copy(bpmn_process.properties_json) + new_properties_json["last_task"] = str(spiff_workflow.last_task) if spiff_workflow.last_task else None + new_properties_json["success"] = spiff_workflow.success + bpmn_process.properties_json = new_properties_json + + bpmn_process_json_data = self.__class__.update_task_data_on_bpmn_process(bpmn_process, spiff_workflow.data) + if bpmn_process_json_data is not None: + self.json_data_dicts[bpmn_process_json_data["hash"]] = bpmn_process_json_data + + self.bpmn_processes[bpmn_process.guid or "top_level"] = bpmn_process + + if spiff_workflow.outer_workflow != spiff_workflow: + direct_parent_bpmn_process = BpmnProcessModel.query.filter_by( + id=bpmn_process.direct_parent_process_id + ).first() + self.update_bpmn_process(spiff_workflow.outer_workflow, direct_parent_bpmn_process) + @classmethod def insert_or_update_json_data_records( cls, json_data_hash_to_json_data_dict_mapping: dict[str, JsonDataDict] @@ -395,3 +527,11 @@ class TaskService: # this helps to convert items like datetime objects to be json serializable converted_data: dict = serializer.data_converter.convert(user_defined_state) return converted_data + + @classmethod + def _update_json_data_dicts_using_list( + cls, json_data_dict_list: list[Optional[JsonDataDict]], json_data_dicts: dict[str, JsonDataDict] + ) -> None: + for json_data_dict in json_data_dict_list: + if json_data_dict is not None: + json_data_dicts[json_data_dict["hash"]] = json_data_dict diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py index 88634456..64e197ef 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py @@ -1,7 +1,6 @@ import time from typing import Callable from typing import Optional -from uuid import UUID from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer # type: ignore from SpiffWorkflow.bpmn.workflow import BpmnWorkflow # type: ignore @@ -10,23 +9,18 @@ from SpiffWorkflow.task import Task as SpiffTask # type: ignore from SpiffWorkflow.task import TaskState from spiffworkflow_backend.exceptions.api_error import ApiError -from spiffworkflow_backend.models import task_definition -from spiffworkflow_backend.models.bpmn_process_definition import BpmnProcessDefinitionModel from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.message_instance import MessageInstanceModel from spiffworkflow_backend.models.message_instance_correlation import ( MessageInstanceCorrelationRuleModel, ) from spiffworkflow_backend.models.process_instance import ProcessInstanceModel -from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventModel -from spiffworkflow_backend.models.process_instance_event import 
ProcessInstanceEventType from spiffworkflow_backend.models.task import TaskModel from spiffworkflow_backend.models.task_definition import TaskDefinitionModel # noqa: F401 from spiffworkflow_backend.services.assertion_service import safe_assertion from spiffworkflow_backend.services.process_instance_lock_service import ( ProcessInstanceLockService, ) -from spiffworkflow_backend.services.task_service import JsonDataDict from spiffworkflow_backend.services.task_service import TaskService @@ -67,11 +61,17 @@ class TaskModelSavingDelegate(EngineStepDelegate): self.current_task_model: Optional[TaskModel] = None self.current_task_start_in_seconds: Optional[float] = None - self.task_models: dict[str, TaskModel] = {} - self.json_data_dicts: dict[str, JsonDataDict] = {} - self.process_instance_events: dict[str, ProcessInstanceEventModel] = {} + # self.task_models: dict[str, TaskModel] = {} + # self.json_data_dicts: dict[str, JsonDataDict] = {} + # self.process_instance_events: dict[str, ProcessInstanceEventModel] = {} self.last_completed_spiff_task: Optional[SpiffTask] = None + self.task_service = TaskService( + process_instance=self.process_instance, + serializer=self.serializer, + bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, + ) + def will_complete_task(self, spiff_task: SpiffTask) -> None: if self._should_update_task_model(): self.current_task_start_in_seconds = time.time() @@ -80,7 +80,7 @@ class TaskModelSavingDelegate(EngineStepDelegate): def did_complete_task(self, spiff_task: SpiffTask) -> None: if self._should_update_task_model(): - task_model = self._update_task_model_with_spiff_task(spiff_task) + task_model = self.task_service.update_task_model_with_spiff_task(spiff_task) if self.current_task_start_in_seconds is None: raise Exception("Could not find cached current_task_start_in_seconds. 
This should never have happend") task_model.start_in_seconds = self.current_task_start_in_seconds @@ -93,13 +93,9 @@ class TaskModelSavingDelegate(EngineStepDelegate): script_engine = bpmn_process_instance.script_engine if hasattr(script_engine, "failing_spiff_task") and script_engine.failing_spiff_task is not None: failing_spiff_task = script_engine.failing_spiff_task - self._update_task_model_with_spiff_task(failing_spiff_task, task_failed=True) + self.task_service.update_task_model_with_spiff_task(failing_spiff_task, task_failed=True) - # import pdb; pdb.set_trace() - db.session.bulk_save_objects(self.task_models.values()) - db.session.bulk_save_objects(self.process_instance_events.values()) - - TaskService.insert_or_update_json_data_records(self.json_data_dicts) + self.task_service.save_objects_to_database() if self.secondary_engine_step_delegate: self.secondary_engine_step_delegate.save(bpmn_process_instance, commit=False) @@ -115,24 +111,8 @@ class TaskModelSavingDelegate(EngineStepDelegate): # ): # self._update_task_model_with_spiff_task(waiting_spiff_task) if self.last_completed_spiff_task is not None: - self._process_spiff_task_children(self.last_completed_spiff_task) - self._process_spiff_task_parents(self.last_completed_spiff_task) - - def _process_spiff_task_children(self, spiff_task: SpiffTask) -> None: - for child_spiff_task in spiff_task.children: - self._update_task_model_with_spiff_task(child_spiff_task) - self._process_spiff_task_children(child_spiff_task) - - def _process_spiff_task_parents(self, spiff_task: SpiffTask) -> None: - (parent_subprocess_guid, _parent_subprocess) = TaskService.task_subprocess(spiff_task) - if parent_subprocess_guid is not None: - spiff_task_of_parent_subprocess = spiff_task.workflow._get_outermost_workflow().get_task_from_id( - UUID(parent_subprocess_guid) - ) - - if spiff_task_of_parent_subprocess is not None: - self._update_task_model_with_spiff_task(spiff_task_of_parent_subprocess) - self._process_spiff_task_parents(spiff_task_of_parent_subprocess) + self.task_service.process_spiff_task_parents(self.last_completed_spiff_task) + self.task_service.process_spiff_task_children(self.last_completed_spiff_task) def _should_update_task_model(self) -> bool: """We need to figure out if we have previously save task info on this process intance. 
@@ -142,63 +122,6 @@ class TaskModelSavingDelegate(EngineStepDelegate): # return self.process_instance.bpmn_process_id is not None return True - def _update_json_data_dicts_using_list(self, json_data_dict_list: list[Optional[JsonDataDict]]) -> None: - for json_data_dict in json_data_dict_list: - if json_data_dict is not None: - self.json_data_dicts[json_data_dict["hash"]] = json_data_dict - - def _update_task_model_with_spiff_task(self, spiff_task: SpiffTask, task_failed: bool = False) -> TaskModel: - ( - bpmn_process, - task_model, - new_task_models, - new_json_data_dicts, - ) = TaskService.find_or_create_task_model_from_spiff_task( - spiff_task, - self.process_instance, - self.serializer, - bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, - ) - bpmn_process_json_data = TaskService.update_task_data_on_bpmn_process( - bpmn_process or task_model.bpmn_process, spiff_task.workflow.data - ) - # stp = False - # for ntm in new_task_models.values(): - # td = TaskDefinitionModel.query.filter_by(id=ntm.task_definition_id).first() - # if td.bpmn_identifier == 'Start': - # # import pdb; pdb.set_trace() - # stp = True - # print("HEY") - - # if stp: - # # import pdb; pdb.set_trace() - # print("HEY2") - self.task_models.update(new_task_models) - self.json_data_dicts.update(new_json_data_dicts) - json_data_dict_list = TaskService.update_task_model(task_model, spiff_task, self.serializer) - self.task_models[task_model.guid] = task_model - if bpmn_process_json_data is not None: - json_data_dict_list.append(bpmn_process_json_data) - self._update_json_data_dicts_using_list(json_data_dict_list) - - if task_model.state == "COMPLETED" or task_failed: - event_type = ProcessInstanceEventType.task_completed.value - if task_failed: - event_type = ProcessInstanceEventType.task_failed.value - - # FIXME: some failed tasks will currently not have either timestamp since we only hook into spiff when tasks complete - # which script tasks execute when READY. 
- timestamp = task_model.end_in_seconds or task_model.start_in_seconds or time.time() - process_instance_event = ProcessInstanceEventModel( - task_guid=task_model.guid, - process_instance_id=self.process_instance.id, - event_type=event_type, - timestamp=timestamp, - ) - self.process_instance_events[task_model.guid] = process_instance_event - - return task_model - class ExecutionStrategy: """Interface of sorts for a concrete execution strategy.""" diff --git a/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn b/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn index d2b1d94e..e7817523 100644 --- a/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn +++ b/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn @@ -4,7 +4,7 @@ Flow_0stlaxe - + Flow_1ygcsbt @@ -23,7 +23,7 @@ - + Flow_09gjylo Flow_0yxus36 @@ -46,7 +46,7 @@ except: we_move_on = False - + Flow_0yxus36 Flow_187mcqe @@ -60,7 +60,7 @@ except: we_move_on == True - + Flow_0lw7sda Flow_1ygcsbt set_top_level_process_script_after_gate = 1 @@ -78,30 +78,36 @@ except: - - - - - - - - - - - - + + + - - + + + + + + + + + + + + + + + + + diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index 0675394b..01624597 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -1,12 +1,12 @@ """Test_process_instance_processor.""" from uuid import UUID -import json import pytest from flask import g from flask.app import Flask from flask.testing import FlaskClient -from SpiffWorkflow.task import TaskState # type: ignore +from SpiffWorkflow.task import Task as SpiffTask # type: ignore +from SpiffWorkflow.task import TaskState from tests.spiffworkflow_backend.helpers.base_test import BaseTest from tests.spiffworkflow_backend.helpers.test_data import load_test_spec @@ -16,6 +16,7 @@ from spiffworkflow_backend.models.group import GroupModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus from spiffworkflow_backend.models.task import TaskModel # noqa: F401 +from spiffworkflow_backend.models.task_definition import TaskDefinitionModel from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.services.authorization_service import AuthorizationService from spiffworkflow_backend.services.authorization_service import ( @@ -297,6 +298,7 @@ class TestProcessInstanceProcessor(BaseTest): spiff_manual_task = processor.__class__.get_task_by_bpmn_identifier( human_task_one.task_name, processor.bpmn_process_instance ) + assert spiff_manual_task is not None processor.suspend() ProcessInstanceProcessor.reset_process(process_instance, str(spiff_manual_task.parent.id), commit=True) @@ -336,7 +338,7 @@ class TestProcessInstanceProcessor(BaseTest): ) processor = ProcessInstanceProcessor(process_instance) processor.do_engine_steps(save=True) - with open("before_reset.json", 'w') as f: f.write(json.dumps(processor.serialize(), indent=2)) + # with open("before_reset.json", 'w') as f: f.write(json.dumps(processor.serialize(), indent=2)) assert len(process_instance.active_human_tasks) 
== 1 initial_human_task_id = process_instance.active_human_tasks[0].id assert len(process_instance.active_human_tasks) == 1 @@ -346,20 +348,21 @@ class TestProcessInstanceProcessor(BaseTest): spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) - processor.suspend() ProcessInstanceProcessor.reset_process(process_instance, str(spiff_manual_task.id), commit=True) process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() processor = ProcessInstanceProcessor(process_instance) - with open("after_reset.json", 'w') as f: f.write(json.dumps(processor.serialize(), indent=2)) + # with open("after_reset.json", 'w') as f: f.write(json.dumps(processor.serialize(), indent=2)) processor.resume() processor.do_engine_steps(save=True) human_task_one = process_instance.active_human_tasks[0] spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) - import pdb; pdb.set_trace() + import pdb + + pdb.set_trace() assert process_instance.status == "complete" def test_properly_saves_tasks_when_running( @@ -409,6 +412,9 @@ class TestProcessInstanceProcessor(BaseTest): human_task_one = process_instance.active_human_tasks[0] spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) + human_task_one = process_instance.active_human_tasks[0] + spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) + ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) # recreate variables to ensure all bpmn json was recreated from scratch from the db process_instance_relookup = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() @@ -429,34 +435,74 @@ class TestProcessInstanceProcessor(BaseTest): }, } fourth_data_set = {**third_data_set, **{"a": 1, "we_move_on": True}} - fifth_data_set = {**fourth_data_set, **{"validate_only": False, "set_top_level_process_script_after_gate": 1}} + fifth_data_set = {**fourth_data_set, **{"set_top_level_process_script_after_gate": 1}} + sixth_data_set = {**fifth_data_set, **{"validate_only": False, "set_top_level_process_script_after_gate": 1}} expected_task_data = { - "top_level_script": first_data_set, - "manual_task": first_data_set, - "top_level_subprocess_script": second_data_set, - "top_level_subprocess": second_data_set, - "test_process_to_call_subprocess_script": third_data_set, - "top_level_call_activity": third_data_set, - "end_event_of_manual_task_model": third_data_set, - "top_level_subprocess_script_second": fourth_data_set, - "test_process_to_call_subprocess_script_second": fourth_data_set, + "top_level_script": {"data": first_data_set, "bpmn_process_identifier": "top_level_process"}, + "top_level_manual_task_one": {"data": first_data_set, "bpmn_process_identifier": "top_level_process"}, + "top_level_manual_task_two": {"data": first_data_set, "bpmn_process_identifier": "top_level_process"}, + "top_level_subprocess_script": { + "data": second_data_set, + "bpmn_process_identifier": "top_level_subprocess", + }, + "top_level_subprocess": {"data": second_data_set, "bpmn_process_identifier": 
"top_level_process"}, + "test_process_to_call_subprocess_script": { + "data": third_data_set, + "bpmn_process_identifier": "test_process_to_call_subprocess", + }, + "top_level_call_activity": {"data": third_data_set, "bpmn_process_identifier": "top_level_process"}, + "top_level_manual_task_two_second": { + "data": third_data_set, + "bpmn_process_identifier": "top_level_process", + }, + "top_level_subprocess_script_second": { + "data": fourth_data_set, + "bpmn_process_identifier": "top_level_subprocess", + }, + "top_level_subprocess_second": {"data": fourth_data_set, "bpmn_process_identifier": "top_level_process"}, + "test_process_to_call_subprocess_script_second": { + "data": fourth_data_set, + "bpmn_process_identifier": "test_process_to_call_subprocess", + }, + "top_level_call_activity_second": { + "data": fourth_data_set, + "bpmn_process_identifier": "top_level_process", + }, + "end_event_of_manual_task_model": {"data": fifth_data_set, "bpmn_process_identifier": "top_level_process"}, } - spiff_tasks_checked_once: list = [] + spiff_tasks_checked: list[str] = [] # TODO: also check task data here from the spiff_task directly to ensure we hydrated spiff correctly - def assert_spiff_task_is_in_process(spiff_task_identifier: str, bpmn_process_identifier: str) -> None: - if spiff_task.task_spec.name == spiff_task_identifier: - expected_task_data_key = spiff_task.task_spec.name - if spiff_task.task_spec.name in spiff_tasks_checked_once: + def assert_spiff_task_is_in_process(spiff_task: SpiffTask) -> None: + spiff_task_identifier = spiff_task.task_spec.name + if spiff_task_identifier in expected_task_data: + bpmn_process_identifier = expected_task_data[spiff_task_identifier]["bpmn_process_identifier"] + expected_task_data_key = spiff_task_identifier + if spiff_task_identifier in spiff_tasks_checked: expected_task_data_key = f"{spiff_task.task_spec.name}_second" - expected_python_env_data = expected_task_data[expected_task_data_key] + assert expected_task_data_key not in spiff_tasks_checked + + spiff_tasks_checked.append(expected_task_data_key) + + expected_python_env_data = expected_task_data[expected_task_data_key]["data"] base_failure_message = ( f"Failed on {bpmn_process_identifier} - {spiff_task_identifier} - task data key" f" {expected_task_data_key}." ) + + count_failure_message = ( + f"{base_failure_message} There are more than 2 entries of this task in the db." + " There should only ever be max 2." + ) + task_models_with_bpmn_identifier_count = ( + TaskModel.query.join(TaskDefinitionModel) + .filter(TaskDefinitionModel.bpmn_identifier == spiff_task.task_spec.name) + .count() + ) + assert task_models_with_bpmn_identifier_count < 3, count_failure_message task_model = TaskModel.query.filter_by(guid=str(spiff_task.id)).first() assert task_model.start_in_seconds is not None @@ -466,7 +512,9 @@ class TestProcessInstanceProcessor(BaseTest): task_definition = task_model.task_definition assert task_definition.bpmn_identifier == spiff_task_identifier assert task_definition.bpmn_name == spiff_task_identifier.replace("_", " ").title() - assert task_definition.bpmn_process_definition.bpmn_identifier == bpmn_process_identifier + assert ( + task_definition.bpmn_process_definition.bpmn_identifier == bpmn_process_identifier + ), base_failure_message message = ( f"{base_failure_message} Expected: {sorted(expected_python_env_data)}. 
Received:" @@ -474,18 +522,14 @@ class TestProcessInstanceProcessor(BaseTest): ) # TODO: if we split out env data again we will need to use it here instead of json_data # assert task_model.python_env_data() == expected_python_env_data, message + # import pdb; pdb.set_trace() assert task_model.json_data() == expected_python_env_data, message - spiff_tasks_checked_once.append(spiff_task.task_spec.name) all_spiff_tasks = processor_final.bpmn_process_instance.get_tasks() assert len(all_spiff_tasks) > 1 for spiff_task in all_spiff_tasks: assert spiff_task.state == TaskState.COMPLETED - assert_spiff_task_is_in_process( - "test_process_to_call_subprocess_script", "test_process_to_call_subprocess" - ) - assert_spiff_task_is_in_process("top_level_subprocess_script", "top_level_subprocess") - assert_spiff_task_is_in_process("top_level_script", "top_level_process") + assert_spiff_task_is_in_process(spiff_task) if spiff_task.task_spec.name == "top_level_call_activity": # the task id / guid of the call activity gets used as the guid of the bpmn process that it calls @@ -513,7 +557,14 @@ class TestProcessInstanceProcessor(BaseTest): assert direct_parent_process is not None assert direct_parent_process.bpmn_process_definition.bpmn_identifier == "test_process_to_call" - assert processor.get_data() == fifth_data_set + for task_bpmn_identifier in expected_task_data.keys(): + message = ( + f"Expected to have seen a task with a bpmn_identifier of {task_bpmn_identifier} but did not. " + f"Only saw {sorted(spiff_tasks_checked)}" + ) + assert task_bpmn_identifier in spiff_tasks_checked, message + + assert processor.get_data() == sixth_data_set def test_does_not_recreate_human_tasks_on_multiple_saves( self, From 3f365d462c45b07000ec7d1a8b96c173d2a8a789 Mon Sep 17 00:00:00 2001 From: jasquat Date: Thu, 30 Mar 2023 11:16:44 -0400 Subject: [PATCH 137/162] remove pdb w/ burnettk --- .../unit/test_process_instance_processor.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index 01624597..e4db3732 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -360,9 +360,6 @@ class TestProcessInstanceProcessor(BaseTest): spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) - import pdb - - pdb.set_trace() assert process_instance.status == "complete" def test_properly_saves_tasks_when_running( From 7d1f01ee026fb3bc91b0b1c8cfc1a7453dec499c Mon Sep 17 00:00:00 2001 From: jasquat Date: Thu, 30 Mar 2023 12:41:42 -0400 Subject: [PATCH 138/162] do not save predicted tasks to the db w/ burnettk --- .../services/task_service.py | 36 ++++++++--- .../services/workflow_execution_service.py | 3 - .../unit/test_process_instance_processor.py | 60 +++++++++++-------- 3 files changed, 64 insertions(+), 35 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index 671d415c..1614815c 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py 
@@ -68,12 +68,15 @@ class TaskService: spiff_task: SpiffTask, ) -> None: for child_spiff_task in spiff_task.children: - self.update_task_model_with_spiff_task( - spiff_task=child_spiff_task, - ) - self.process_spiff_task_children( - spiff_task=child_spiff_task, - ) + if child_spiff_task._has_state(TaskState.PREDICTED_MASK): + self.__class__.remove_spiff_task_from_parent(child_spiff_task, self.task_models) + else: + self.update_task_model_with_spiff_task( + spiff_task=child_spiff_task, + ) + self.process_spiff_task_children( + spiff_task=child_spiff_task, + ) def process_spiff_task_parents( self, @@ -137,7 +140,7 @@ class TaskService: ) self.process_instance_events[task_model.guid] = process_instance_event - # self.update_bpmn_process(spiff_task.workflow, bpmn_process) + self.update_bpmn_process(spiff_task.workflow, bpmn_process) return task_model def update_bpmn_process( @@ -315,7 +318,7 @@ class TaskService: if "subprocess_specs" in bpmn_process_dict: bpmn_process_dict.pop("subprocess_specs") - new_task_models = {} + new_task_models: dict[str, TaskModel] = {} new_json_data_dicts: dict[str, JsonDataDict] = {} bpmn_process = None @@ -386,7 +389,12 @@ class TaskService: if task_properties["task_spec"] == "Root": continue + # we are going to avoid saving likely and maybe tasks to the db. + # that means we need to remove them from their parents' lists of children as well. spiff_task = spiff_workflow.get_task_from_id(UUID(task_id)) + if spiff_task._has_state(TaskState.PREDICTED_MASK): + cls.remove_spiff_task_from_parent(spiff_task, new_task_models) + continue task_model = TaskModel.query.filter_by(guid=task_id).first() if task_model is None: @@ -406,6 +414,18 @@ class TaskService: new_json_data_dicts[python_env_dict["hash"]] = python_env_dict return (bpmn_process, new_task_models, new_json_data_dicts) + @classmethod + def remove_spiff_task_from_parent(cls, spiff_task: SpiffTask, task_models: dict[str, TaskModel]) -> None: + """Removes the given spiff task from its parent and then updates the task_models dict with the changes.""" + spiff_task_parent_guid = str(spiff_task.parent.id) + spiff_task_guid = str(spiff_task.id) + if spiff_task_parent_guid in task_models: + parent_task_model = task_models[spiff_task_parent_guid] + new_parent_properties_json = copy.copy(parent_task_model.properties_json) + new_parent_properties_json["children"].remove(spiff_task_guid) + parent_task_model.properties_json = new_parent_properties_json + task_models[spiff_task_parent_guid] = parent_task_model + @classmethod def update_task_data_on_bpmn_process( cls, bpmn_process: BpmnProcessModel, bpmn_process_data_dict: dict diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py index 64e197ef..3fd433e5 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py @@ -61,9 +61,6 @@ class TaskModelSavingDelegate(EngineStepDelegate): self.current_task_model: Optional[TaskModel] = None self.current_task_start_in_seconds: Optional[float] = None - # self.task_models: dict[str, TaskModel] = {} - # self.json_data_dicts: dict[str, JsonDataDict] = {} - # self.process_instance_events: dict[str, ProcessInstanceEventModel] = {} self.last_completed_spiff_task: Optional[SpiffTask] = None self.task_service = TaskService( diff --git 
a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index e4db3732..c07f2c79 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -418,54 +418,59 @@ class TestProcessInstanceProcessor(BaseTest): processor_final = ProcessInstanceProcessor(process_instance_relookup) assert process_instance_relookup.status == "complete" - first_data_set = {"set_in_top_level_script": 1} - second_data_set = { - **first_data_set, + data_set_1 = {"set_in_top_level_script": 1} + data_set_2 = { + **data_set_1, **{"set_in_top_level_subprocess": 1, "we_move_on": False}, } - third_data_set = { - **second_data_set, + data_set_3 = { + **data_set_2, **{ - "set_in_test_process_to_call_script": 1, "set_in_test_process_to_call_subprocess_subprocess_script": 1, "set_in_test_process_to_call_subprocess_script": 1, }, } - fourth_data_set = {**third_data_set, **{"a": 1, "we_move_on": True}} - fifth_data_set = {**fourth_data_set, **{"set_top_level_process_script_after_gate": 1}} - sixth_data_set = {**fifth_data_set, **{"validate_only": False, "set_top_level_process_script_after_gate": 1}} + data_set_4 = { + **data_set_3, + **{ + "set_in_test_process_to_call_script": 1, + }, + } + data_set_5 = {**data_set_4, **{"a": 1, "we_move_on": True}} + data_set_6 = {**data_set_5, **{"set_top_level_process_script_after_gate": 1}} + data_set_7 = {**data_set_6, **{"validate_only": False, "set_top_level_process_script_after_gate": 1}} expected_task_data = { - "top_level_script": {"data": first_data_set, "bpmn_process_identifier": "top_level_process"}, - "top_level_manual_task_one": {"data": first_data_set, "bpmn_process_identifier": "top_level_process"}, - "top_level_manual_task_two": {"data": first_data_set, "bpmn_process_identifier": "top_level_process"}, + "top_level_script": {"data": data_set_1, "bpmn_process_identifier": "top_level_process"}, + "top_level_manual_task_one": {"data": data_set_1, "bpmn_process_identifier": "top_level_process"}, + "top_level_manual_task_two": {"data": data_set_1, "bpmn_process_identifier": "top_level_process"}, "top_level_subprocess_script": { - "data": second_data_set, + "data": data_set_2, "bpmn_process_identifier": "top_level_subprocess", }, - "top_level_subprocess": {"data": second_data_set, "bpmn_process_identifier": "top_level_process"}, + "top_level_subprocess": {"data": data_set_2, "bpmn_process_identifier": "top_level_process"}, "test_process_to_call_subprocess_script": { - "data": third_data_set, + "data": data_set_3, "bpmn_process_identifier": "test_process_to_call_subprocess", }, - "top_level_call_activity": {"data": third_data_set, "bpmn_process_identifier": "top_level_process"}, + "top_level_call_activity": {"data": data_set_4, "bpmn_process_identifier": "top_level_process"}, "top_level_manual_task_two_second": { - "data": third_data_set, + "data": data_set_4, "bpmn_process_identifier": "top_level_process", }, "top_level_subprocess_script_second": { - "data": fourth_data_set, + "data": data_set_5, "bpmn_process_identifier": "top_level_subprocess", }, - "top_level_subprocess_second": {"data": fourth_data_set, "bpmn_process_identifier": "top_level_process"}, + "top_level_subprocess_second": {"data": data_set_5, "bpmn_process_identifier": "top_level_process"}, "test_process_to_call_subprocess_script_second": { - "data": 
fourth_data_set, + "data": data_set_5, "bpmn_process_identifier": "test_process_to_call_subprocess", }, "top_level_call_activity_second": { - "data": fourth_data_set, + "data": data_set_5, "bpmn_process_identifier": "top_level_process", }, - "end_event_of_manual_task_model": {"data": fifth_data_set, "bpmn_process_identifier": "top_level_process"}, + "end_event_of_manual_task_model": {"data": data_set_6, "bpmn_process_identifier": "top_level_process"}, } spiff_tasks_checked: list[str] = [] @@ -496,6 +501,7 @@ class TestProcessInstanceProcessor(BaseTest): ) task_models_with_bpmn_identifier_count = ( TaskModel.query.join(TaskDefinitionModel) + .filter(TaskModel.process_instance_id == process_instance_relookup.id) .filter(TaskDefinitionModel.bpmn_identifier == spiff_task.task_spec.name) .count() ) @@ -519,7 +525,6 @@ class TestProcessInstanceProcessor(BaseTest): ) # TODO: if we split out env data again we will need to use it here instead of json_data # assert task_model.python_env_data() == expected_python_env_data, message - # import pdb; pdb.set_trace() assert task_model.json_data() == expected_python_env_data, message all_spiff_tasks = processor_final.bpmn_process_instance.get_tasks() @@ -561,7 +566,14 @@ class TestProcessInstanceProcessor(BaseTest): ) assert task_bpmn_identifier in spiff_tasks_checked, message - assert processor.get_data() == sixth_data_set + task_models_that_are_predicted_count = ( + TaskModel.query.filter(TaskModel.process_instance_id == process_instance_relookup.id) + .filter(TaskModel.state.in_(["LIKELY", "MAYBE"])) # type: ignore + .count() + ) + assert task_models_that_are_predicted_count == 0 + + assert processor.get_data() == data_set_7 def test_does_not_recreate_human_tasks_on_multiple_saves( self, From c5806ee53d8f6543d554a4512a62cbab0adcf5fd Mon Sep 17 00:00:00 2001 From: jasquat Date: Thu, 30 Mar 2023 15:25:44 -0400 Subject: [PATCH 139/162] fixed some failing tests except for test_send_event --- .../services/task_service.py | 15 +++++------ .../services/workflow_execution_service.py | 7 +++-- .../tests/data/manual_task/manual_task.bpmn | 8 +++--- .../integration/test_process_api.py | 26 +++++-------------- .../unit/test_process_instance_processor.py | 14 ++++++++-- 5 files changed, 33 insertions(+), 37 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index 1614815c..fef8265f 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -70,13 +70,13 @@ class TaskService: for child_spiff_task in spiff_task.children: if child_spiff_task._has_state(TaskState.PREDICTED_MASK): self.__class__.remove_spiff_task_from_parent(child_spiff_task, self.task_models) - else: - self.update_task_model_with_spiff_task( - spiff_task=child_spiff_task, - ) - self.process_spiff_task_children( - spiff_task=child_spiff_task, - ) + continue + self.update_task_model_with_spiff_task( + spiff_task=child_spiff_task, + ) + self.process_spiff_task_children( + spiff_task=child_spiff_task, + ) def process_spiff_task_parents( self, @@ -148,7 +148,6 @@ class TaskService: spiff_workflow: BpmnWorkflow, bpmn_process: BpmnProcessModel, ) -> None: - # import pdb; pdb.set_trace() new_properties_json = copy.copy(bpmn_process.properties_json) new_properties_json["last_task"] = str(spiff_workflow.last_task) if spiff_workflow.last_task else None new_properties_json["success"] = 
spiff_workflow.success diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py index 3fd433e5..43927b83 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py @@ -126,13 +126,12 @@ class ExecutionStrategy: def __init__(self, delegate: EngineStepDelegate): """__init__.""" self.delegate = delegate - self.bpmn_process_instance = None def do_engine_steps(self, bpmn_process_instance: BpmnWorkflow, exit_at: None = None) -> None: pass - def save(self) -> None: - self.delegate.save(self.bpmn_process_instance) + def save(self, bpmn_process_instance: BpmnWorkflow) -> None: + self.delegate.save(bpmn_process_instance) class GreedyExecutionStrategy(ExecutionStrategy): @@ -238,7 +237,7 @@ class WorkflowExecutionService: raise ApiError.from_workflow_exception("task_error", str(swe), swe) from swe finally: - self.execution_strategy.save() + self.execution_strategy.save(self.bpmn_process_instance) db.session.commit() if save: diff --git a/spiffworkflow-backend/tests/data/manual_task/manual_task.bpmn b/spiffworkflow-backend/tests/data/manual_task/manual_task.bpmn index f4d0190b..ac1486e4 100644 --- a/spiffworkflow-backend/tests/data/manual_task/manual_task.bpmn +++ b/spiffworkflow-backend/tests/data/manual_task/manual_task.bpmn @@ -7,8 +7,8 @@ Flow_0nnh2x9 - - + + ## Hello @@ -16,7 +16,7 @@ Flow_0nnh2x9 - + Flow_0stlaxe Flow_1pmem7s @@ -31,7 +31,7 @@ - + diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index 89fda503..c5623f47 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -2616,6 +2616,8 @@ class TestProcessApi(BaseTest): content_type="application/json", data=json.dumps(data), ) + assert response.status_code == 200 + assert response.json is not None assert response.json["status"] == "complete" response = client.get( @@ -2641,9 +2643,9 @@ class TestProcessApi(BaseTest): ) -> None: """Test_script_unit_test_run.""" process_group_id = "test_group" - process_model_id = "process_navigation" - bpmn_file_name = "process_navigation.bpmn" - bpmn_file_location = "process_navigation" + process_model_id = "manual_task" + bpmn_file_name = "manual_task.bpmn" + bpmn_file_location = "manual_task" process_model_identifier = self.create_group_and_model_with_bpmn( client=client, user=with_super_admin_user, @@ -2674,25 +2676,11 @@ class TestProcessApi(BaseTest): headers=self.logged_in_headers(with_super_admin_user), ) - data = { - "dateTime": "PT1H", - "external": True, - "internal": True, - "label": "Event_0e4owa3", - "typename": "TimerEventDefinition", - } - response = client.post( - f"/v1.0/send-event/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}", - headers=self.logged_in_headers(with_super_admin_user), - content_type="application/json", - data=json.dumps(data), - ) - response = client.get( f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/task-info", headers=self.logged_in_headers(with_super_admin_user), ) - assert len(response.json) == 9 + assert len(response.json) == 7 
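        # A stricter companion check one could add here, assuming each entry in
        # the task-info payload carries a "state" key (an assumption about the
        # response shape, not something this patch asserts): predicted tasks
        # should never reach the endpoint now that LIKELY/MAYBE tasks are not
        # written to the db.
        #
        #     assert all(t.get("state") not in ("LIKELY", "MAYBE") for t in response.json)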
human_task = next(task for task in response.json if task["bpmn_identifier"] == "manual_task_one") response = client.post( @@ -2711,7 +2699,7 @@ class TestProcessApi(BaseTest): headers=self.logged_in_headers(with_super_admin_user), ) assert response.status_code == 200 - assert len(response.json) == 9 + assert len(response.json) == 7 def setup_initial_groups_for_move_tests(self, client: FlaskClient, with_super_admin_user: UserModel) -> None: """Setup_initial_groups_for_move_tests.""" diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index c07f2c79..13a22ced 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -353,14 +353,24 @@ class TestProcessInstanceProcessor(BaseTest): process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() processor = ProcessInstanceProcessor(process_instance) - # with open("after_reset.json", 'w') as f: f.write(json.dumps(processor.serialize(), indent=2)) processor.resume() processor.do_engine_steps(save=True) human_task_one = process_instance.active_human_tasks[0] spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) - assert process_instance.status == "complete" + process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() + processor = ProcessInstanceProcessor(process_instance) + human_task_one = process_instance.active_human_tasks[0] + spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) + ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) + human_task_one = process_instance.active_human_tasks[0] + spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) + ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) + + # recreate variables to ensure all bpmn json was recreated from scratch from the db + process_instance_relookup = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() + assert process_instance_relookup.status == "complete" def test_properly_saves_tasks_when_running( self, From 5754f44c259026d59b1feaf4292a01c22a5eed60 Mon Sep 17 00:00:00 2001 From: jasquat Date: Thu, 30 Mar 2023 16:30:34 -0400 Subject: [PATCH 140/162] WIP trying to get resetting to a task within a subprocess working w/ burnettk --- .../services/process_instance_processor.py | 1 + .../services/workflow_execution_service.py | 3 ++ .../integration/test_process_api.py | 1 + .../unit/test_process_instance_processor.py | 32 +++++++++++-------- 4 files changed, 24 insertions(+), 13 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 1c32efb8..e572c5ae 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -1348,6 +1348,7 @@ class ProcessInstanceProcessor: for task_to_update in tasks_to_update: # 
print(f"task_to_update: {task_to_update}") + print(f"task_to_update.state: {task_to_update.state}") TaskService.reset_task_model(task_to_update, state="FUTURE", commit=commit) # TaskService.reset_task_model(task_to_update, state=task_to_update.state, commit=commit) # if task_to_update.task_definition.bpmn_identifier != 'top_level_process_script_after_gate': diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py index 43927b83..495ab310 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py @@ -72,6 +72,8 @@ class TaskModelSavingDelegate(EngineStepDelegate): def will_complete_task(self, spiff_task: SpiffTask) -> None: if self._should_update_task_model(): self.current_task_start_in_seconds = time.time() + # import pdb; pdb.set_trace() + spiff_task.task_spec._predict(spiff_task, mask=TaskState.NOT_FINISHED_MASK) if self.secondary_engine_step_delegate: self.secondary_engine_step_delegate.will_complete_task(spiff_task) @@ -108,6 +110,7 @@ class TaskModelSavingDelegate(EngineStepDelegate): # ): # self._update_task_model_with_spiff_task(waiting_spiff_task) if self.last_completed_spiff_task is not None: + import pdb; pdb.set_trace() self.task_service.process_spiff_task_parents(self.last_completed_spiff_task) self.task_service.process_spiff_task_children(self.last_completed_spiff_task) diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index c5623f47..84d970bd 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -2618,6 +2618,7 @@ class TestProcessApi(BaseTest): ) assert response.status_code == 200 assert response.json is not None + import pdb; pdb.set_trace() assert response.json["status"] == "complete" response = client.get( diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index 13a22ced..3d949388 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -347,30 +347,36 @@ class TestProcessInstanceProcessor(BaseTest): human_task_one = process_instance.active_human_tasks[0] spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) + human_task_one = process_instance.active_human_tasks[0] + spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) + ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) + + ### NOTES: + # somehow we are hosing the task state so that when completing tasks of a subprocess, the task AFTER the subprocess task + # is not marked READY but instead stays as FUTURE. 
Running things like: + # self.last_completed_spiff_task.task_spec._update(self.last_completed_spiff_task) + # and + # self.last_completed_spiff_task.task_spec._predict(self.last_completed_spiff_task, mask=TaskState.NOT_FINISHED_MASK) + # did not help. processor.suspend() - ProcessInstanceProcessor.reset_process(process_instance, str(spiff_manual_task.id), commit=True) + # import pdb; pdb.set_trace() + task_model_to_reset_to = TaskModel.query.join(TaskDefinitionModel).filter(TaskDefinitionModel.bpmn_identifier == 'top_level_subprocess_script').order_by(TaskModel.id.desc()).first() + assert task_model_to_reset_to is not None + ProcessInstanceProcessor.reset_process(process_instance, task_model_to_reset_to.guid, commit=True) + # import pdb; pdb.set_trace() process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() processor = ProcessInstanceProcessor(process_instance) processor.resume() processor.do_engine_steps(save=True) + import pdb; pdb.set_trace() + assert len(process_instance.active_human_tasks) == 1 human_task_one = process_instance.active_human_tasks[0] spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) - process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() - processor = ProcessInstanceProcessor(process_instance) - human_task_one = process_instance.active_human_tasks[0] - spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) - ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) - human_task_one = process_instance.active_human_tasks[0] - spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) - ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) - - # recreate variables to ensure all bpmn json was recreated from scratch from the db - process_instance_relookup = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() - assert process_instance_relookup.status == "complete" + assert process_instance.status == "complete" def test_properly_saves_tasks_when_running( self, From 57aa8d45b1c9c02325df1a8bbe5fd3e2c13851c0 Mon Sep 17 00:00:00 2001 From: jasquat Date: Thu, 30 Mar 2023 16:54:31 -0400 Subject: [PATCH 141/162] truncate process model metadata w/ burnettk --- .../services/process_instance_processor.py | 2 +- .../nested-task-data-structure.bpmn | 2 +- .../spiffworkflow_backend/unit/test_process_model.py | 11 ++++++++--- 3 files changed, 10 insertions(+), 5 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 01a27e9c..7dbde949 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -829,7 +829,7 @@ class ProcessInstanceProcessor: process_instance_id=self.process_instance_model.id, key=key, ) - pim.value = data_for_key + pim.value = str(data_for_key)[0:255] db.session.add(pim) db.session.commit() diff --git a/spiffworkflow-backend/tests/data/nested-task-data-structure/nested-task-data-structure.bpmn 
b/spiffworkflow-backend/tests/data/nested-task-data-structure/nested-task-data-structure.bpmn index 7452216a..36acf7ab 100644 --- a/spiffworkflow-backend/tests/data/nested-task-data-structure/nested-task-data-structure.bpmn +++ b/spiffworkflow-backend/tests/data/nested-task-data-structure/nested-task-data-structure.bpmn @@ -21,7 +21,7 @@ outer['time'] = time.time_ns() Flow_18gs4jt Flow_1flxgry - outer["inner"] = 'sweet2' + outer["inner"] = 'sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2sweet2END_THIS_WILL_TRUNCATE_HERE' diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_model.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_model.py index 22f92111..40a9c96e 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_model.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_model.py @@ -1,4 +1,6 @@ """Process Model.""" +import re + from flask.app import Flask from flask.testing import FlaskClient from tests.spiffworkflow_backend.helpers.base_test import BaseTest @@ -21,7 +23,7 @@ class TestProcessModel(BaseTest): def test_initializes_files_as_empty_array(self) -> None: """Test_initializes_files_as_empty_array.""" - process_model_one = self.create_test_process_model(id="model_one", display_name="Model One") + process_model_one = self._create_test_process_model(id="model_one", display_name="Model One") assert process_model_one.files == [] def test_can_run_process_model_with_call_activities_when_in_same_process_model_directory( @@ -130,14 +132,17 @@ class TestProcessModel(BaseTest): process_instance_id=process_instance.id, key="awesome_var" ).first() assert process_instance_metadata_awesome_var is not None - assert process_instance_metadata_awesome_var.value == "sweet2" + + # notion 160: ensure that we truncate long values to 255 characters + assert re.match(r"^sweet2.*END$", process_instance_metadata_awesome_var.value) + assert len(process_instance_metadata_awesome_var.value) == 255 process_instance_metadata_awesome_var = ProcessInstanceMetadataModel.query.filter_by( process_instance_id=process_instance.id, key="invoice_number" ).first() assert process_instance_metadata_awesome_var is not None assert process_instance_metadata_awesome_var.value == "123" - def create_test_process_model(self, id: str, display_name: str) -> ProcessModelInfo: + def _create_test_process_model(self, id: str, display_name: str) -> ProcessModelInfo: """Create_test_process_model.""" return ProcessModelInfo( id=id, From 87762801a35eb2ce2514d53527f5811f8af79c0b Mon Sep 17 00:00:00 2001 From: burnettk Date: Thu, 30 Mar 2023 17:19:37 -0400 Subject: [PATCH 142/162] test user list update --- .../keycloak/bin/add_test_users_to_keycloak | 20 +- .../realm_exports/spiffworkflow-realm.json | 653 ++---------------- .../keycloak/test_user_lists/sartography | 18 +- .../keycloak/test_user_lists/status | 190 +++-- 4 files changed, 144 insertions(+), 737 deletions(-) diff --git a/spiffworkflow-backend/keycloak/bin/add_test_users_to_keycloak b/spiffworkflow-backend/keycloak/bin/add_test_users_to_keycloak index c53fe438..f6a39aae 100755 --- a/spiffworkflow-backend/keycloak/bin/add_test_users_to_keycloak +++ b/spiffworkflow-backend/keycloak/bin/add_test_users_to_keycloak @@ -54,9 +54,10 @@ backend_token=$(jq -r '.access_token' <<< "$result") 
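# With the changes below, each test_user_lists file gains a second "pass"
# column; the input is expected to look roughly like this (values here are
# illustrative, not taken from the real lists):
#
#   email,pass,spiffworkflow-employeeid
#   some.user@status.im,s3cret,225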
function add_user() {
  local user_email=$1
  local username=$2
- local user_attribute_one=$3
+ local pass=$3
+ local user_attribute_one=$4

- local credentials='{"type":"password","value":"'"${username}"'","temporary":false}'
+ local credentials='{"type":"password","value":"'"${pass}"'","temporary":false}'

  local data='{"email":"'"${user_email}"'", "enabled":"true", "username":"'"${username}"'", "credentials":['"${credentials}"']'
  if [[ -n "$user_attribute_one" ]]; then
@@ -79,11 +80,16 @@ while read -r input_line; do
  if ! grep -qE '^#' <<<"$input_line" ; then
    if [[ "$first_line_processed" == "false" ]]; then
      email_header=$(awk -F ',' '{print $1}' <<<"$input_line")
+     pass_header=$(awk -F ',' '{print $2}' <<<"$input_line")
      if [[ "$email_header" != "email" ]]; then
        >&2 echo "ERROR: the first column in the first row must be email."
        exit 1
      fi
-     custom_attribute_one=$(awk -F ',' '{print $2}' <<<"$input_line")
+     if [[ "$pass_header" != "pass" ]]; then
+       >&2 echo "ERROR: the second column in the first row must be pass."
+       exit 1
+     fi
+     custom_attribute_one=$(awk -F ',' '{print $3}' <<<"$input_line")
      first_line_processed="true"
    elif [[ -n "$input_line" ]]; then
      echo "Importing: $input_line"
@@ -95,8 +101,10 @@ while read -r input_line; do
      exit 1
    fi
-   user_attribute_one=$(awk -F ',' '{print $2}' <<<"$input_line")
-   http_code=$(add_user "$user_email" "$username" "$user_attribute_one")
+   password=$(awk -F ',' '{print $2}' <<<"$input_line")
+   echo "Password: $password"
+   user_attribute_one=$(awk -F ',' '{print $3}' <<<"$input_line")
+   http_code=$(add_user "$user_email" "$username" "$password" "$user_attribute_one")
    if [[ "$http_code" == "409" ]]; then
      user_info=$(curl --fail --silent --location --request GET "${KEYCLOAK_BASE_URL}/admin/realms/${keycloak_realm}/users?username=${username}&exact=true" \
        -H 'Content-Type: application/json' \
        -H "Authorization: Bearer $backend_token"
        -H 'Content-Type: application/json' \
        -H "Authorization: Bearer $backend_token"
-     http_code=$(add_user "$user_email" "$username" "$user_attribute_one")
+     http_code=$(add_user "$user_email" "$username" "$password" "$user_attribute_one")
    fi
    if [[ "$http_code" != "201" ]]; then
      >&2 echo "ERROR: Failed to create user: ${user_email} with http_code: ${http_code}"
diff --git a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json
index 27239bca..c6ca21ed 100644
--- a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json
+++ b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json
@@ -807,213 +807,6 @@
    "realmRoles" : [ "default-roles-spiffworkflow" ],
    "notBefore" : 0,
    "groups" : [ ]
-  }, {
-    "id" : "3730e6ec-4b0c-4fbe-a34b-2cd43d8c9854",
-    "createdTimestamp" : 1678461819329,
-    "username" : "core10.contributor",
-    "enabled" : true,
-    "totp" : false,
-    "emailVerified" : false,
-    "email" : "core10.contributor@status.im",
-    "attributes" : {
-      "spiffworkflow-employeeid" : [ "225" ]
-    },
-    "credentials" : [ {
-      "id" : "223cbe3b-d432-4707-b826-6220caa14bd7",
-      "type" : "password",
-      "createdDate" : 1678461819366,
-      "secretData" : "{\"value\":\"Mp81SeHhDQa2U/i/S2CfPnKvjwRDJCKZMgCQX3BkZWE/a6791XjXmwB8DE5qS8tiST68BQoQRuc1VCiNKL3zaQ==\",\"salt\":\"Jb0BB2tIQ+HUJQIFr82g9w==\",\"additionalParameters\":{}}",
-      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
-    } ],
-    "disableableCredentialTypes" : [ ],
-    "requiredActions" : [ ],
-    "realmRoles" : [ "default-roles-spiffworkflow" ],
-    "notBefore" : 0,
-    "groups" :
[ ] - }, { - "id" : "88e7ca9e-1825-4d4a-9f60-29368023c67b", - "createdTimestamp" : 1678461819411, - "username" : "core11.contributor", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "core11.contributor@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "226" ] - }, - "credentials" : [ { - "id" : "46dc7656-b70b-4d86-80fc-aa08d807be2b", - "type" : "password", - "createdDate" : 1678461819447, - "secretData" : "{\"value\":\"hgBEI05fhPMVx47O9KmnrTvPomKJXK0IjEHZ30zM3fu6maT2fOHGh4+ti6MVhKqQeXKZR4wtC3i1RoqLNOsjpQ==\",\"salt\":\"BWxZnmTfzggGqzVKkFY+vQ==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] - }, { - "id" : "6504eeda-be24-488b-ace4-1d50a7a354bc", - "createdTimestamp" : 1678461819494, - "username" : "core12.contributor", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "core12.contributor@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "227" ] - }, - "credentials" : [ { - "id" : "bde05120-10b5-4796-b559-9238847d2604", - "type" : "password", - "createdDate" : 1678461819527, - "secretData" : "{\"value\":\"njdHu9w1jeSvaNbdwVf0X+3TZaHmZVwUc+/TOAtv05eNGBIW9Vt1+500AsLReHS8lb/I3fglr5I9ZskYHUc0fA==\",\"salt\":\"lH6xJHf1jQGX1j4bYH6GXA==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] - }, { - "id" : "ed249cd3-c66e-46e0-9184-1e6468b57afa", - "createdTimestamp" : 1678461819557, - "username" : "core13.contributor", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "core13.contributor@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "228" ] - }, - "credentials" : [ { - "id" : "81b65ee8-6fcd-4cd6-8886-aa44feefa55f", - "type" : "password", - "createdDate" : 1678461819592, - "secretData" : "{\"value\":\"ywBsPI0pdoCOjNWinYNZQBBzL3NRp2u2jv3aXBGxneTo9v8XaVweGL52HIyTikdfmX46TEMIH6LQopaYFcwhng==\",\"salt\":\"GTw17rcE4UvB/Dx4UUkAog==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] - }, { - "id" : "1b7b3aa4-b0fe-46c7-a9a1-3fb3c99c7576", - "createdTimestamp" : 1678461819624, - "username" : "core14.contributor", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "core14.contributor@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "229" ] - }, - "credentials" : [ { - "id" : "0c24ffe5-cb97-4b0d-a0d1-920de540742e", - "type" : "password", - "createdDate" : 1678461819658, - "secretData" : "{\"value\":\"3RXjoEUpqxH6RM0sZUf393H9nzyVADId8IWNru9fWgdQg6tHaZezRBZ/lRRERvvdmLiupQ3cMsL/HHvPRQA6tA==\",\"salt\":\"zkaBJY+Dvg5Az74MACBBUg==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" 
: 0, - "groups" : [ ] - }, { - "id" : "8e2b39a8-a744-4345-928f-da1a36f15f46", - "createdTimestamp" : 1678461819686, - "username" : "core15.contributor", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "core15.contributor@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "230" ] - }, - "credentials" : [ { - "id" : "14a91e80-cec9-44cf-aa85-28e0043f660d", - "type" : "password", - "createdDate" : 1678461819720, - "secretData" : "{\"value\":\"JnP9MpLDM92LuzJnEVUy0vzm9LoSttezepYu4ANfJlmcS6cUvnnh1yDKm43I2YzM4+mXRdxJyoLZTk/ZpmshSQ==\",\"salt\":\"5CKz6mrqr4IaUeEuu/hR9Q==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] - }, { - "id" : "ffe3e131-9479-49d2-8125-83dc86a16478", - "createdTimestamp" : 1678461819751, - "username" : "core16.contributor", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "core16.contributor@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "231" ] - }, - "credentials" : [ { - "id" : "cf010c6c-035e-4a2f-ab74-5617fd23c808", - "type" : "password", - "createdDate" : 1678461819786, - "secretData" : "{\"value\":\"WeZ+YxLVtjRhlLZnb6j3AfecmQEsvTm3iM8ZqQthgq9c4BuZ23qare3PEVlRCA1+Oj5sAOOS1hs9iab6ia49wQ==\",\"salt\":\"uai22Okju4dg7GfO7p3C1Q==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] - }, { - "id" : "94bcef08-2af1-4805-864d-cbabcd851d67", - "createdTimestamp" : 1678461819815, - "username" : "core17.contributor", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "core17.contributor@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "232" ] - }, - "credentials" : [ { - "id" : "c7a58ff0-7c56-464b-9009-b6e845075087", - "type" : "password", - "createdDate" : 1678461819850, - "secretData" : "{\"value\":\"R53+DKM2eyUXDYJDjW9BtwdY+x0/CUhgUDDYjip7BvGAepzRqPvZVbCLqJjFf6YctO4Va7F65n4evd40GbO7fQ==\",\"salt\":\"U/ia7H+I4yeD3bpP1vnH6Q==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] - }, { - "id" : "7b86b997-de98-478c-8550-cfca65e40c33", - "createdTimestamp" : 1679060366901, - "username" : "core18.contributor", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "core18.contributor@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "233" ] - }, - "credentials" : [ { - "id" : "55ca2bd7-6f60-4f04-be21-df6300ca9442", - "type" : "password", - "createdDate" : 1679060366954, - "secretData" : "{\"value\":\"hC/O8LJ8/y/nXLmRFgRazOX9PXMHkowYH1iHUB4Iw9jzc8IMMv8dFrxu7XBklfyz7CPc1bmgl0k29jygRZYHlg==\",\"salt\":\"4R17tmLrHWyFAMvrfLMETQ==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" 
], - "notBefore" : 0, - "groups" : [ ] }, { "id" : "3b81b45e-759b-4d7a-aa90-adf7b447208c", "createdTimestamp" : 1676302140358, @@ -1106,98 +899,6 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] - }, { - "id" : "5119e7f6-9b0f-4e04-824a-9c5ef87fdb42", - "createdTimestamp" : 1678126023934, - "username" : "core6.contributor", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "core6.contributor@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "199" ] - }, - "credentials" : [ { - "id" : "f219e401-0fdb-4b73-be77-d01bb0caa448", - "type" : "password", - "createdDate" : 1678126023967, - "secretData" : "{\"value\":\"zdr8Psnlti56oHo8f/wuuZb5p7ZRpDQKHGFsrkjtl0VaOn2uNOeUmCqXLQ4UGyGssK8Qn8s8R62yrFKUNeeSjA==\",\"salt\":\"9MlVZL9xo3OWvlsvyXt0UQ==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] - }, { - "id" : "89d57569-1a90-412a-ba01-aa8ff19ed171", - "createdTimestamp" : 1678461819085, - "username" : "core7.contributor", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "core7.contributor@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "222" ] - }, - "credentials" : [ { - "id" : "cfeb64ec-a38a-4f95-b0cd-28b5501524d8", - "type" : "password", - "createdDate" : 1678461819121, - "secretData" : "{\"value\":\"w4WKqWXTlin6MPQi0mO+Bvktb2zuMdIylqNNxYgBCnd5vwzq2widp7G9f3wz8Iy0wY8K2rqBjdSmmbZ7fJ8//Q==\",\"salt\":\"SRuRkx3572cDGoWhqAQGLQ==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] - }, { - "id" : "81efd609-b6ae-42ec-800e-d6fcca2f8282", - "createdTimestamp" : 1678461819150, - "username" : "core8.contributor", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "core8.contributor@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "223" ] - }, - "credentials" : [ { - "id" : "0b476f6f-7aa4-4f75-bf5c-ac47521f3900", - "type" : "password", - "createdDate" : 1678461819185, - "secretData" : "{\"value\":\"ALWI40OEZUhMJ1CQTV9wSrwQUWfYNiYbN2JTmCUfbLUcUbY+rTrKOfAn9Mc/bCEFJomiTb9u/eqnkKX/lCGgew==\",\"salt\":\"wW2T8PkpCnnPfMNwpPVUVQ==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] - }, { - "id" : "a1233c9f-e59a-48dc-aaa7-1513f1aa5654", - "createdTimestamp" : 1678461819225, - "username" : "core9.contributor", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "core9.contributor@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "224" ] - }, - "credentials" : [ { - "id" : "907b9d46-b8a3-4a14-ab89-b07d2c4d431a", - "type" : "password", - "createdDate" : 1678461819266, - "secretData" : "{\"value\":\"v9aFLHzLyiwWuAxNeVtRjtXzRtug6KU2f19SbS8dBdPC0mlHORoLYXy6VoAMdcTv8bfrW6e9iCgqWnXdXU6yMg==\",\"salt\":\"giVxblJWbFNNPiZZKxWYxg==\",\"additionalParameters\":{}}", - "credentialData" : 
"{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] }, { "id" : "7b9767ac-24dc-43b0-838f-29e16b4fd14e", "createdTimestamp" : 1675718483773, @@ -1528,29 +1229,6 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] - }, { - "id" : "d123d384-66a4-4db5-9dbb-d73c12047001", - "createdTimestamp" : 1678997616280, - "username" : "finance.project-lead", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "finance.project-lead@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "128" ] - }, - "credentials" : [ { - "id" : "b680f5c5-c2de-4255-9d23-7e18cff3ac4e", - "type" : "password", - "createdDate" : 1678997616336, - "secretData" : "{\"value\":\"4kasmb11Sv62rInh8eFUhS3rGYNymzsvxzfsEIWGYhnlisYuo1iTS2opv/kET/NyJlsYrfwc7yrIqSHvkUHkkA==\",\"salt\":\"q/ees3a4K+3K11olnfPzCQ==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] }, { "id" : "f6d2488a-446c-493b-bbe8-210ede6f3e42", "createdTimestamp" : 1674148694899, @@ -1661,8 +1339,8 @@ "notBefore" : 0, "groups" : [ ] }, { - "id" : "4f3fadc8-f0a3-45fb-8710-c054385b866b", - "createdTimestamp" : 1676302141941, + "id" : "1a8cb2a3-09ec-4f24-9f5e-13bab170c4a9", + "createdTimestamp" : 1680210955180, "username" : "infra.project-lead", "enabled" : true, "totp" : false, @@ -1672,10 +1350,10 @@ "spiffworkflow-employeeid" : [ "130" ] }, "credentials" : [ { - "id" : "e422f671-1693-4469-8cdc-0ea7dcb27c66", + "id" : "1283acee-35b4-40cd-a1cb-9dd3c41dfd3c", "type" : "password", - "createdDate" : 1676302141975, - "secretData" : "{\"value\":\"gWFNRdQhmsN2IMyaZEHgTk8A0mna72VYfeWk7PX31MhBQjQIGsctuEKK3TNxiB046LM8ZiUntA59sTPBgouVeQ==\",\"salt\":\"AtU0bmAz1z4f7wh/Z/ru1Q==\",\"additionalParameters\":{}}", + "createdDate" : 1680210955239, + "secretData" : "{\"value\":\"7wW+4snc/57IFEyCApWM7jwxJSLAlndSy/F3rSE0KOv/StS4HOByov02uDuTQ3h4CbW+zVp4+EqPFJiNWgf5WA==\",\"salt\":\"/BYeWVg0iy8Ou/YroWoeSw==\",\"additionalParameters\":{}}", "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" } ], "disableableCredentialTypes" : [ ], @@ -2054,8 +1732,8 @@ "notBefore" : 0, "groups" : [ ] }, { - "id" : "6e9129f9-34f8-43bb-953b-de4156d425ba", - "createdTimestamp" : 1676302142894, + "id" : "7596232c-47bd-40db-bc0d-fbe984ebb22a", + "createdTimestamp" : 1680210955394, "username" : "legal.project-lead", "enabled" : true, "totp" : false, @@ -2065,10 +1743,10 @@ "spiffworkflow-employeeid" : [ "133" ] }, "credentials" : [ { - "id" : "b17d488c-7665-40d4-b758-c392ecc9e793", + "id" : "e379cc51-564f-4950-92dd-7fa18cff5d3b", "type" : "password", - "createdDate" : 1676302142929, - "secretData" : "{\"value\":\"FiEmNY1c+4xOepA3lzOzzaaNgthk9rMz1xXiV+5F2DUwBtoEqFRrlGTdHVVz5XjrcFhgW15+R3rSEfHsCLJTiA==\",\"salt\":\"xYYuuodywbhxqXcj3XMqKw==\",\"additionalParameters\":{}}", + "createdDate" : 1680210955428, + "secretData" : "{\"value\":\"k+No1LvsqQmYTOQzuXN9oeVKne+FTCNAe4lZ4qVZq2M4pSRqKeySJWdtLYjxzHRfLufVpir6gXRCvs7ZiUL9GQ==\",\"salt\":\"XQ469z9b2a8Jw1IeZc9NaQ==\",\"additionalParameters\":{}}", "credentialData" : 
"{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" } ], "disableableCredentialTypes" : [ ], @@ -2214,75 +1892,6 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] - }, { - "id" : "a368625b-b905-4e0d-83f6-dfe707b6320a", - "createdTimestamp" : 1678461818455, - "username" : "legal6.sme", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "legal6.sme@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "213" ] - }, - "credentials" : [ { - "id" : "53a21d32-1da5-45f1-a7d9-e45304b213d1", - "type" : "password", - "createdDate" : 1678461818490, - "secretData" : "{\"value\":\"9zEoc1uV0QXsMvAS8lA1xdh4bOqcPdSAItg7zBFr5i+In/xOBtpRM0277nMgDNLtar4s+HRhytWgJ7OidVmjsw==\",\"salt\":\"ahEvQYvH0bHbT/uHz1I9QA==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] - }, { - "id" : "e02e085f-eb50-4fe3-844c-24e41479ab47", - "createdTimestamp" : 1678461818523, - "username" : "legal7.sme", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "legal7.sme@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "214" ] - }, - "credentials" : [ { - "id" : "f5377236-8b0b-4be4-8dab-afb2c4a6470f", - "type" : "password", - "createdDate" : 1678461818557, - "secretData" : "{\"value\":\"dyQhBsrNeYHkbJudEjiay3duLFO9B66l0d+2L26S+/HMGuKfuI4NT+gju1MfQPVJhyC01FH7EmDGGS8I45i2jw==\",\"salt\":\"kU4NM5QOWvGSX+kVyvwSoA==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] - }, { - "id" : "4de624bd-485f-49d5-817c-ba66c31be7a9", - "createdTimestamp" : 1678461818589, - "username" : "legal8.sme", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "legal8.sme@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "215" ] - }, - "credentials" : [ { - "id" : "5d71a02b-2f4b-484d-9125-a4454a17a800", - "type" : "password", - "createdDate" : 1678461818632, - "secretData" : "{\"value\":\"UH+hrjz9F+X0vQlbgzaFiZBA5uol9Lnjs1/5VpBnbWuISF6MAlxj2fmbnZbw4ILVSllaQvVSFaD4YUxbnRhUmw==\",\"salt\":\"MuAF2Rl7IOxOgZ7Xbqs3RQ==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] }, { "id" : "8a03f00f-310d-4bae-b918-f6f128f98095", "createdTimestamp" : 1677187934419, @@ -2352,29 +1961,6 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] - }, { - "id" : "97843876-e1b6-469a-bab4-f9bce4aa5936", - "createdTimestamp" : 1678461819014, - "username" : "mobile.project-lead", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "mobile.project-lead@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "221" ] - }, - "credentials" : [ { - "id" : "96c00769-4348-4ad3-82c5-f34124602c17", - "type" : "password", - "createdDate" : 1678461819049, - "secretData" : 
"{\"value\":\"E7nVydRqQ+TZs54VmJcT4AjjtT1la7PmQbOnylqTPkkcOdLRmZbNTw/K429lOhqUHX7y1prC3OjGdY1VI8bjsg==\",\"salt\":\"D61yv2zS3Bi8epVKjRpWQw==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] }, { "id" : "7f34beba-e1e1-458a-8d23-eb07d6e3800c", "createdTimestamp" : 1678126023154, @@ -2398,29 +1984,6 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] - }, { - "id" : "e8e67210-5088-46bc-97db-09dbcaf9de97", - "createdTimestamp" : 1678461818939, - "username" : "nomos.project-lead", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "nomos.project-lead@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "220" ] - }, - "credentials" : [ { - "id" : "8139f9b8-bad9-41d2-b3c6-589a2c11bf45", - "type" : "password", - "createdDate" : 1678461818975, - "secretData" : "{\"value\":\"6g5XIaFghMzx8CFYO6VJLGpUqBRiAEwFklZSI+uzJ5vrMsDvrcGjDuWtY+lmRO4lKqy30lBvqhMFvPT6pCxF3g==\",\"salt\":\"dT+XvwD+hxUwRAJCZFFYiA==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] }, { "id" : "df72b3d2-07fd-4cb0-a447-a1c433db49d5", "createdTimestamp" : 1676302143785, @@ -2606,100 +2169,8 @@ "notBefore" : 0, "groups" : [ ] }, { - "id" : "07f7a010-7542-4c2f-adf8-04b39433181d", - "createdTimestamp" : 1678461818663, - "username" : "peopleops.partner6.sme", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "peopleops.partner6.sme@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "216" ] - }, - "credentials" : [ { - "id" : "867e9236-3a15-4198-b085-d36a7fa859e9", - "type" : "password", - "createdDate" : 1678461818713, - "secretData" : "{\"value\":\"kmQkAD459XkLCGaWWTr1rrwZYQ2gQ4k2xTroJZAyHmWvBBnKg+a74cRaW2Y3dnzcGTlcprtuMvwYVfq7HIOkmg==\",\"salt\":\"uKORqhpJJnceOf/q56BiSA==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] - }, { - "id" : "5d41b5b7-bc3c-42fe-b20b-56a7c6cd3801", - "createdTimestamp" : 1678461818743, - "username" : "peopleops.partner7.sme", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "peopleops.partner7.sme@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "217" ] - }, - "credentials" : [ { - "id" : "745d419f-c6de-4504-9c8e-c3f7b1ac747e", - "type" : "password", - "createdDate" : 1678461818778, - "secretData" : "{\"value\":\"myjshlqPW/3DpwC5X4vsAaqcsisdKwqr+CQXP18mt3AQMzqipHJaVAEAJzkZS4j42VB/XAvh0olMxb8Vapyw3g==\",\"salt\":\"jNpX6DyT5Tt/5dPXYiQfpQ==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] - }, { - "id" : "73523c93-6104-4494-b1c8-2af6087bcdd9", - "createdTimestamp" : 
1678461818810, - "username" : "peopleops.partner8.sme", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "peopleops.partner8.sme@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "218" ] - }, - "credentials" : [ { - "id" : "e839763b-aba2-4b4f-b715-b2c061b7430f", - "type" : "password", - "createdDate" : 1678461818843, - "secretData" : "{\"value\":\"M0KfNRU/4qt1WL/cGiSm6sKfN9PTK+6JiV96Y55Zg5CYaXH0ihTyGo62wS4T4YuyMm6/yTKz7+w3gdU4Zg/3Uw==\",\"salt\":\"sd/JEXtWTW4PetXzEBCNQA==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] - }, { - "id" : "cdff7ae3-72eb-45b6-9424-6f56df9c3b1c", - "createdTimestamp" : 1678461818873, - "username" : "peopleops.partner9.sme", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "peopleops.partner9.sme@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "219" ] - }, - "credentials" : [ { - "id" : "5ff8e042-a72e-4b46-9efa-e1910cd09d13", - "type" : "password", - "createdDate" : 1678461818908, - "secretData" : "{\"value\":\"q/hdvLKerMbnpe6yjC3VxDqCFi0ne7rD5A1K39EM+XgD6bFI62qKW5JIBB5BaGz/GrWYw7ipwMBaOvLBOubSkg==\",\"salt\":\"vfnCbi47kaYpILxbL0b3Tg==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] - }, { - "id" : "dbf941e7-0b45-4bc6-ae9e-d7153d32ce47", - "createdTimestamp" : 1676302143401, + "id" : "b57086d7-f301-4e11-ab02-60b02c79163a", + "createdTimestamp" : 1680210955550, "username" : "peopleops.project-lead", "enabled" : true, "totp" : false, @@ -2709,10 +2180,10 @@ "spiffworkflow-employeeid" : [ "147" ] }, "credentials" : [ { - "id" : "85fa4e0a-2f59-4c51-8e8b-20acb9813ab9", + "id" : "e17da85a-70ab-4f7d-8cff-6f4826f35bbc", "type" : "password", - "createdDate" : 1676302143434, - "secretData" : "{\"value\":\"FBi/INvDb50hA4QNRcSbd5gc10Dspq7QppiCvQ6ualnH/MlTyVq5CL9o1BWya0xxVdG/4jxFkUlgpN1w5liZ1Q==\",\"salt\":\"s2yJeI/k96iSy8zHAdTVSQ==\",\"additionalParameters\":{}}", + "createdDate" : 1680210955585, + "secretData" : "{\"value\":\"Llqk65fjzqPK6koWNRBPY6S1/T3GXgc4PHJSw/qlH7qzEQALzkKqMG1/C0s2EkAonj8WpIzZyEZKzRgMGqgh1g==\",\"salt\":\"1PoYqx4FYOST9EUEqbf9mA==\",\"additionalParameters\":{}}", "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" } ], "disableableCredentialTypes" : [ ], @@ -3203,29 +2674,6 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] - }, { - "id" : "c684e919-6ae0-4031-a160-8e90338567b3", - "createdTimestamp" : 1678461818310, - "username" : "security6.sme", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "security6.sme@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "211" ] - }, - "credentials" : [ { - "id" : "aff2f083-f6aa-4f93-899f-aaa3119a9739", - "type" : "password", - "createdDate" : 1678461818346, - "secretData" : "{\"value\":\"7XGMuiylxKmwDwJZtiPNLllERwN8KLoILLE/BjjXOkqN3c+C+KYgNxPhrDt8dG9PDYOq/59vh/4E2y82GLaoEw==\",\"salt\":\"ufzmAcoMLoi0jtRHwGDadg==\",\"additionalParameters\":{}}", - "credentialData" : 
"{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] }, { "id" : "b768e3ef-f905-4493-976c-bc3408c04bec", "createdTimestamp" : 1675447832524, @@ -3352,29 +2800,6 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] - }, { - "id" : "cb99a5c4-2c28-4b19-b8c7-635b757fc817", - "createdTimestamp" : 1678461818231, - "username" : "waku.research.project-lead", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "waku.research.project-lead@status.im", - "attributes" : { - "spiffworkflow-employeeid" : [ "164" ] - }, - "credentials" : [ { - "id" : "ed5fc4a1-d574-4940-b5e4-3a1ad9d122ba", - "type" : "password", - "createdDate" : 1678461818268, - "secretData" : "{\"value\":\"K7MRRw2gO4bXHJH8U4cZU2rcVQT/hxw7kMHqN1uDae9FVqFEKh014qiwePOHr5K1xjUw8uU5e/d3HCcwhuRUQw==\",\"salt\":\"R4FdsDK6NvelgQ8gH7Me0g==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "notBefore" : 0, - "groups" : [ ] } ], "scopeMappings" : [ { "clientScope" : "offline_access", @@ -4607,7 +4032,7 @@ "subType" : "authenticated", "subComponents" : { }, "config" : { - "allowed-protocol-mapper-types" : [ "oidc-usermodel-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-role-list-mapper", "oidc-address-mapper", "saml-user-attribute-mapper", "oidc-usermodel-property-mapper", "oidc-full-name-mapper", "saml-user-property-mapper" ] + "allowed-protocol-mapper-types" : [ "oidc-address-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-property-mapper", "oidc-full-name-mapper", "saml-role-list-mapper", "saml-user-attribute-mapper", "saml-user-property-mapper", "oidc-usermodel-attribute-mapper" ] } }, { "id" : "d68e938d-dde6-47d9-bdc8-8e8523eb08cd", @@ -4625,7 +4050,7 @@ "subType" : "anonymous", "subComponents" : { }, "config" : { - "allowed-protocol-mapper-types" : [ "saml-role-list-mapper", "oidc-usermodel-property-mapper", "oidc-usermodel-attribute-mapper", "oidc-full-name-mapper", "saml-user-property-mapper", "saml-user-attribute-mapper", "oidc-address-mapper", "oidc-sha256-pairwise-sub-mapper" ] + "allowed-protocol-mapper-types" : [ "oidc-usermodel-property-mapper", "oidc-full-name-mapper", "saml-user-property-mapper", "saml-role-list-mapper", "saml-user-attribute-mapper", "oidc-usermodel-attribute-mapper", "oidc-address-mapper", "oidc-sha256-pairwise-sub-mapper" ] } }, { "id" : "3854361d-3fe5-47fb-9417-a99592e3dc5c", @@ -4715,7 +4140,7 @@ "internationalizationEnabled" : false, "supportedLocales" : [ ], "authenticationFlows" : [ { - "id" : "2b106fbb-fa1a-4acd-b95a-08e3ace9a0fc", + "id" : "52a2585e-d5f1-418b-aaf8-0cb6b8151ac1", "alias" : "Account verification options", "description" : "Method with which to verity the existing account", "providerId" : "basic-flow", @@ -4737,7 +4162,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "e3c77b34-6f89-4ddf-90da-486ad2cf620d", + "id" : "0f86958e-5010-413c-aabc-bc77e0973d29", "alias" : "Authentication Options", "description" : "Authentication options.", "providerId" : "basic-flow", @@ -4766,7 +4191,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "4df60d27-2ad2-4819-a7a2-45b5e8cc054b", + "id" : 
"f17c6e82-f120-4581-83bf-1b9252f26314", "alias" : "Browser - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -4788,7 +4213,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "c6c0ab1c-e8cc-47f6-8b19-c89c9ad431aa", + "id" : "b6cef746-aa54-4474-b3fb-cddad60cedee", "alias" : "Direct Grant - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -4810,7 +4235,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "d331b984-7398-4e87-9357-4f16b4389a6e", + "id" : "18aab899-fd48-4c40-b862-21ca89783c8c", "alias" : "First broker login - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -4832,7 +4257,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "87cfccbe-25bc-41d8-b009-9b8e65ea244a", + "id" : "edf8650a-1745-4c0f-9bd9-7ee84d7bb85a", "alias" : "Handle Existing Account", "description" : "Handle what to do if there is existing account with same email/username like authenticated identity provider", "providerId" : "basic-flow", @@ -4854,7 +4279,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "893e65ce-b2f2-4323-9c5a-bedfaef72ded", + "id" : "7df66a51-caa6-4b3a-8dd9-7d9e1dbe97a9", "alias" : "Reset - Conditional OTP", "description" : "Flow to determine if the OTP should be reset or not. Set to REQUIRED to force.", "providerId" : "basic-flow", @@ -4876,7 +4301,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "815ce99a-21fe-43fb-8d73-4ff433d2c231", + "id" : "13a8650b-00d1-49d4-ba19-945908dbaf40", "alias" : "User creation or linking", "description" : "Flow for the existing/non-existing user alternatives", "providerId" : "basic-flow", @@ -4899,7 +4324,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "c05fc254-0382-49c3-a666-00623d5ee1fe", + "id" : "8e8ca30f-39b7-460a-9986-72483ed987b0", "alias" : "Verify Existing Account by Re-authentication", "description" : "Reauthentication of existing account", "providerId" : "basic-flow", @@ -4921,7 +4346,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "df015ab7-fa9d-416f-bcf0-a2ec26c13ede", + "id" : "91635e4a-d004-4529-b03f-5af754af2547", "alias" : "browser", "description" : "browser based authentication", "providerId" : "basic-flow", @@ -4957,7 +4382,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "95a3d414-80a3-42de-abdb-40512b13229e", + "id" : "4c6e7a05-f2d7-4a95-b0ea-be4979449827", "alias" : "clients", "description" : "Base authentication for clients", "providerId" : "client-flow", @@ -4993,7 +4418,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "923d6322-6d29-40bc-87e1-bcf13c6158fb", + "id" : "75d2ec8a-49bb-4790-a2da-af4dd150b9bb", "alias" : "direct grant", "description" : "OpenID Connect Resource Owner Grant", "providerId" : "basic-flow", @@ -5022,7 +4447,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "2f07fd17-a290-4d48-af3e-3cfd527fa5a1", + "id" : "a27a47d4-97bd-473f-9837-5d211c2a1f5d", "alias" : "docker auth", "description" : "Used by Docker clients to authenticate against the IDP", "providerId" : "basic-flow", @@ -5037,7 +4462,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "54100d19-bc91-4cba-af55-297a543eaa9a", + "id" : "d1c3cf02-f04b-48fc-a4b7-ffa0e5fd5b7f", "alias" : "first broker login", "description" : "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account", "providerId" : "basic-flow", @@ -5060,7 +4485,7 @@ "userSetupAllowed" : false } ] 
}, { - "id" : "05921eb5-d82c-4563-99e0-55e7911bf550", + "id" : "69abc281-22aa-4cd9-8a17-27c41840146e", "alias" : "forms", "description" : "Username, password, otp and other auth forms.", "providerId" : "basic-flow", @@ -5082,7 +4507,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "222cb198-cac2-4d61-826c-47aa77d73d3a", + "id" : "f47e8bfd-4f6c-44ee-b95e-2d5ae5b8fca3", "alias" : "http challenge", "description" : "An authentication flow based on challenge-response HTTP Authentication Schemes", "providerId" : "basic-flow", @@ -5104,7 +4529,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "5941ffb8-9d61-4b7e-b46e-b9160b92d9bc", + "id" : "e7f58101-c7a7-43b2-847f-ad03930d6a64", "alias" : "registration", "description" : "registration flow", "providerId" : "basic-flow", @@ -5120,7 +4545,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "d63ba5c0-e9ed-4f92-a6b8-c4f69b6258a8", + "id" : "b9a709c3-1f1b-4c69-baca-4287e792f0e4", "alias" : "registration form", "description" : "registration form", "providerId" : "form-flow", @@ -5156,7 +4581,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "82d2eb72-4cfa-41be-b800-96633b6bbf60", + "id" : "5824833e-704d-4b25-8b48-44f7a5ff0584", "alias" : "reset credentials", "description" : "Reset credentials for a user if they forgot their password or something", "providerId" : "basic-flow", @@ -5192,7 +4617,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "aeacc85c-e8da-41c8-84bb-4740214c3d1f", + "id" : "c9d37f8a-b04b-40d2-a1a3-bd0c78f4c1a8", "alias" : "saml ecp", "description" : "SAML ECP Profile Authentication Flow", "providerId" : "basic-flow", @@ -5208,13 +4633,13 @@ } ] } ], "authenticatorConfig" : [ { - "id" : "1f753a86-8657-4ec9-87bc-94d79e3aa3f8", + "id" : "e7bb6227-7b03-4c6b-ae97-b72e69967be4", "alias" : "create unique user config", "config" : { "require.password.update.after.registration" : "false" } }, { - "id" : "b92405c6-1646-4cf2-8c8d-0f66026024ed", + "id" : "5a856c63-1341-42c2-aa5b-24246324816b", "alias" : "review profile config", "config" : { "update.profile.on.first.login" : "missing" diff --git a/spiffworkflow-backend/keycloak/test_user_lists/sartography b/spiffworkflow-backend/keycloak/test_user_lists/sartography index d5d5c7bf..9b587465 100644 --- a/spiffworkflow-backend/keycloak/test_user_lists/sartography +++ b/spiffworkflow-backend/keycloak/test_user_lists/sartography @@ -1,9 +1,9 @@ -email,spiffworkflow-employeeid -alex@sartography.com,111 -dan@sartography.com,115 -daniel@sartography.com -elizabeth@sartography.com -j@sartography.com -jon@sartography.com -kb@sartography.com -madhurya@sartography.com,160 +email,pass,spiffworkflow-employeeid +alex@sartography.com,,111 +dan@sartography.com,,115 +daniel@sartography.com,, +elizabeth@sartography.com,, +j@sartography.com,, +jon@sartography.com,, +kb@sartography.com,, +madhurya@sartography.com,,160 diff --git a/spiffworkflow-backend/keycloak/test_user_lists/status b/spiffworkflow-backend/keycloak/test_user_lists/status index c702a9a6..6eb987b1 100644 --- a/spiffworkflow-backend/keycloak/test_user_lists/status +++ b/spiffworkflow-backend/keycloak/test_user_lists/status @@ -1,116 +1,90 @@ -email,spiffworkflow-employeeid +email,pass,spiffworkflow-employeeid # admin@spiffworkflow.org amir@status.im -app.program-lead@status.im,121 -codex-a1.sme@status.im,209 -codex.project-lead@status.im,153 -codex.sme@status.im,185 -codex1.sme@status.im,186 -codex2.sme@status.im,187 -codex3.sme@status.im,188 -codex4.sme@status.im,189 -codex5.sme@status.im,190 -core-a1.contributor@status.im,202 
-core-a2.contributor@status.im,203 -core1.contributor@status.im,155 -core10.contributor@status.im,225 -core11.contributor@status.im,226 -core12.contributor@status.im,227 -core13.contributor@status.im,228 -core14.contributor@status.im,229 -core15.contributor@status.im,230 -core16.contributor@status.im,231 -core17.contributor@status.im,232 -core18.contributor@status.im,233 -core2.contributor@status.im,156 -core3.contributor@status.im,157 -core4.contributor@status.im,158 -core5.contributor@status.im,159 -core6.contributor@status.im,199 -core7.contributor@status.im,222 -core8.contributor@status.im,223 -core9.contributor@status.im,224 -core@status.im,113 +app.program-lead@status.im,,121 +codex-a1.sme@status.im,,209 +codex.project-lead@status.im,,153 +codex.sme@status.im,,185 +codex1.sme@status.im,,186 +codex2.sme@status.im,,187 +codex3.sme@status.im,,188 +codex4.sme@status.im,,189 +codex5.sme@status.im,,190 +core-a1.contributor@status.im,,202 +core-a2.contributor@status.im,,203 +core1.contributor@status.im,,155 +core2.contributor@status.im,,156 +core3.contributor@status.im,,157 +core4.contributor@status.im,,158 +core5.contributor@status.im,,159 +core@status.im,,113 dao.project.lead@status.im -desktop-a1.sme@status.im,210 +desktop-a1.sme@status.im,,210 desktop.program.lead@status.im -desktop.project-lead@status.im,192 +desktop.project-lead@status.im,,192 desktop.project.lead@status.im -desktop.sme@status.im,193 -desktop1.sme@status.im,194 -desktop2.sme@status.im,195 -desktop3.sme@status.im,196 -desktop4.sme@status.im,197 -desktop5.sme@status.im,198 -fin@status.im,118 -finance.project-lead@status.im,128 +desktop.sme@status.im,,193 +desktop1.sme@status.im,,194 +desktop2.sme@status.im,,195 +desktop3.sme@status.im,,196 +desktop4.sme@status.im,,197 +desktop5.sme@status.im,,198 +fin@status.im,,118 finance_user1@status.im -fluffy.project-lead@status.im,162 -harmeet@status.im,109 -infra-a1.sme@status.im,204 -infra.project-lead@status.im,130 -infra.sme@status.im,119 -infra1.sme@status.im,131 -infra2.sme@status.im,132 -infra3.sme@status.im,167 -infra4.sme@status.im,175 -infra5.sme@status.im,176 -infra6.sme@status.im,212 +fluffy.project-lead@status.im,,162 +harmeet@status.im,,109 +infra-a1.sme@status.im,,204 +infra.project-lead@status.im,infra.project-leadx,130 +infra.sme@status.im,,119 +infra1.sme@status.im,,131 +infra2.sme@status.im,,132 +infra3.sme@status.im,,167 +infra4.sme@status.im,,175 +infra5.sme@status.im,,176 jakub@status.im -jamescheung@status.im,234 +jamescheung@status.im,,234 jarrad@status.im -lead@status.im,114 -legal-a1.sme@status.im,205 -legal.project-lead@status.im,133 -legal.sme@status.im,125 -legal1.sme@status.im,134 -legal2.sme@status.im,165 -legal3.sme@status.im,166 -legal4.sme@status.im,177 -legal5.sme@status.im,178 -legal6.sme@status.im,213 -legal7.sme@status.im,214 -legal8.sme@status.im,215 -logos.program-lead@status.im,160 -manuchehr@status.im,110 -mobile.project-lead@status.im,221 -nimbus.program-lead@status.im,161 -nomos.project-lead@status.im,220 -peopleops.partner-a1.sme@status.im,208 -peopleops.partner.sme@status.im,148 -peopleops.partner1.sme@status.im,149 -peopleops.partner2.sme@status.im,173 -peopleops.partner3.sme@status.im,174 -peopleops.partner4.sme@status.im,181 -peopleops.partner5.sme@status.im,182 -peopleops.partner6.sme@status.im,216 -peopleops.partner7.sme@status.im,217 -peopleops.partner8.sme@status.im,218 -peopleops.partner9.sme@status.im,219 -peopleops.partner@status.im,150 -peopleops.project-lead@status.im,147 -peopleops.talent.sme@status.im,143 
-peopleops.talent1.sme@status.im,142 -peopleops.talent@status.im,141 -ppg.ba-a1.sme@status.im,207 -ppg.ba.project-lead@status.im,137 -ppg.ba.sme@status.im,138 -ppg.ba1.sme@status.im,170 -ppg.ba2.sme@status.im,171 -ppg.ba3.sme@status.im,172 -ppg.ba4.sme@status.im,200 -ppg.ba5.sme@status.im,201 -ppg.ba@status.im,127 -sasha@status.im,112 -security-a1.sme@status.im,206 -security.project-lead@status.im,151 -security.sme@status.im,123 -security1.sme@status.im,135 -security2.sme@status.im,168 -security3.sme@status.im,169 -security4.sme@status.im,179 -security5.sme@status.im,180 -security6.sme@status.im,211 -services.lead@status.im,122 -vac.program-lead@status.im,163 -waku.research.project-lead@status.im,164 +lead@status.im,,114 +legal-a1.sme@status.im,,205 +legal.project-lead@status.im,legal.project-leadx,133 +legal.sme@status.im,,125 +legal1.sme@status.im,,134 +legal2.sme@status.im,,165 +legal3.sme@status.im,,166 +legal4.sme@status.im,,177 +legal5.sme@status.im,,178 +logos.program-lead@status.im,,160 +manuchehr@status.im,,110 +nimbus.program-lead@status.im,,161 +peopleops.partner-a1.sme@status.im,,208 +peopleops.partner.sme@status.im,,148 +peopleops.partner1.sme@status.im,,149 +peopleops.partner2.sme@status.im,,173 +peopleops.partner3.sme@status.im,,174 +peopleops.partner4.sme@status.im,,181 +peopleops.partner5.sme@status.im,,182 +peopleops.partner@status.im,,150 +peopleops.project-lead@status.im,peopleops.project-leadx,147 +peopleops.talent.sme@status.im,,143 +peopleops.talent1.sme@status.im,,142 +peopleops.talent@status.im,,141 +ppg.ba-a1.sme@status.im,,207 +ppg.ba.project-lead@status.im,,137 +ppg.ba.sme@status.im,,138 +ppg.ba1.sme@status.im,,170 +ppg.ba2.sme@status.im,,171 +ppg.ba3.sme@status.im,,172 +ppg.ba4.sme@status.im,,200 +ppg.ba5.sme@status.im,,201 +ppg.ba@status.im,,127 +sasha@status.im,,112 +security-a1.sme@status.im,,206 +security.project-lead@status.im,,151 +security.sme@status.im,,123 +security1.sme@status.im,,135 +security2.sme@status.im,,168 +security3.sme@status.im,,169 +security4.sme@status.im,,179 +security5.sme@status.im,,180 +services.lead@status.im,,122 +vac.program-lead@status.im,,163 From be5bf319744e665063d21fb5cef0e7d718da7044 Mon Sep 17 00:00:00 2001 From: jasquat Date: Fri, 31 Mar 2023 10:48:16 -0400 Subject: [PATCH 143/162] added test for loopback to subprocess and fixed issue w/ burnettk --- .../services/process_instance_processor.py | 5 + .../services/task_service.py | 26 ++-- .../services/workflow_execution_service.py | 4 +- .../loopback_to_subprocess.bpmn | 116 ++++++++++++++++++ .../unit/test_process_instance_processor.py | 46 ++++++- 5 files changed, 180 insertions(+), 17 deletions(-) create mode 100644 spiffworkflow-backend/tests/data/loopback_to_subprocess/loopback_to_subprocess.bpmn diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 1fff87c1..a5bab8af 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -1897,6 +1897,11 @@ class ProcessInstanceProcessor: all_tasks = self.bpmn_process_instance.get_tasks(TaskState.ANY_MASK) return [t for t in all_tasks if t.state in [TaskState.WAITING, TaskState.READY]] + def get_task_by_guid( + self, task_guid: str + ) -> Optional[SpiffTask]: + return self.bpmn_process_instance.get_task_from_id(UUID(task_guid)) + @classmethod def 
get_task_by_bpmn_identifier( cls, bpmn_task_identifier: str, bpmn_process_instance: BpmnWorkflow diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index fef8265f..e9839fa7 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -60,7 +60,7 @@ class TaskService: spiff_task: SpiffTask, ) -> None: self.process_spiff_task_children(spiff_task) - self.process_spiff_task_parents(spiff_task) + self.process_spiff_task_parent_subprocess_tasks(spiff_task) self.save_objects_to_database() def process_spiff_task_children( @@ -68,9 +68,9 @@ class TaskService: spiff_task: SpiffTask, ) -> None: for child_spiff_task in spiff_task.children: - if child_spiff_task._has_state(TaskState.PREDICTED_MASK): - self.__class__.remove_spiff_task_from_parent(child_spiff_task, self.task_models) - continue + # if child_spiff_task._has_state(TaskState.PREDICTED_MASK): + # self.__class__.remove_spiff_task_from_parent(child_spiff_task, self.task_models) + # continue self.update_task_model_with_spiff_task( spiff_task=child_spiff_task, ) @@ -78,10 +78,15 @@ class TaskService: spiff_task=child_spiff_task, ) - def process_spiff_task_parents( + def process_spiff_task_parent_subprocess_tasks( self, spiff_task: SpiffTask, ) -> None: + """Find the parent subprocess of a given spiff_task and update its data. + + This will also process that subprocess task's children and will recurse upwards + to process its parent subprocesses as well. + """ (parent_subprocess_guid, _parent_subprocess) = self.__class__.task_subprocess(spiff_task) if parent_subprocess_guid is not None: spiff_task_of_parent_subprocess = spiff_task.workflow._get_outermost_workflow().get_task_from_id( @@ -92,7 +97,10 @@ class TaskService: self.update_task_model_with_spiff_task( spiff_task=spiff_task_of_parent_subprocess, ) - self.process_spiff_task_parents( + self.process_spiff_task_children( + spiff_task=spiff_task_of_parent_subprocess, + ) + self.process_spiff_task_parent_subprocess_tasks( spiff_task=spiff_task_of_parent_subprocess, ) @@ -391,9 +399,9 @@ class TaskService: # we are going to avoid saving likely and maybe tasks to the db. # that means we need to remove them from their parents' lists of children as well. 
spiff_task = spiff_workflow.get_task_from_id(UUID(task_id)) - if spiff_task._has_state(TaskState.PREDICTED_MASK): - cls.remove_spiff_task_from_parent(spiff_task, new_task_models) - continue + # if spiff_task._has_state(TaskState.PREDICTED_MASK): + # cls.remove_spiff_task_from_parent(spiff_task, new_task_models) + # continue task_model = TaskModel.query.filter_by(guid=task_id).first() if task_model is None: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py index 495ab310..310286e7 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py @@ -110,8 +110,8 @@ class TaskModelSavingDelegate(EngineStepDelegate): # ): # self._update_task_model_with_spiff_task(waiting_spiff_task) if self.last_completed_spiff_task is not None: - import pdb; pdb.set_trace() - self.task_service.process_spiff_task_parents(self.last_completed_spiff_task) + # import pdb; pdb.set_trace() + self.task_service.process_spiff_task_parent_subprocess_tasks(self.last_completed_spiff_task) self.task_service.process_spiff_task_children(self.last_completed_spiff_task) def _should_update_task_model(self) -> bool: diff --git a/spiffworkflow-backend/tests/data/loopback_to_subprocess/loopback_to_subprocess.bpmn b/spiffworkflow-backend/tests/data/loopback_to_subprocess/loopback_to_subprocess.bpmn new file mode 100644 index 00000000..eff8cd2f --- /dev/null +++ b/spiffworkflow-backend/tests/data/loopback_to_subprocess/loopback_to_subprocess.bpmn @@ -0,0 +1,116 @@ + + + + + Flow_1dk6oyl + + + Flow_0s9lss3 + Flow_02xy1ag + Flow_11uu31d + + + + Flow_0sw85uk + Flow_0s9lss3 + x=1 + + + Flow_02xy1ag + + + x==2 + + + + + Flow_1dk6oyl + Flow_11uu31d + Flow_0sw85uk + + Flow_0ih1i19 + + + + Flow_0dua5j8 + + + + + HEY MANUAL + + Flow_0ih1i19 + Flow_0dua5j8 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index 3d949388..34c71e7c 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -696,14 +696,48 @@ class TestProcessInstanceProcessor(BaseTest): assert len(process_instance.human_tasks) == 2 human_task_two = process_instance.active_human_tasks[0] - # this is just asserting the way the functionality currently works in spiff. - # we would actually expect this to change one day if we stop reusing the same guid - # when we re-do a task. - # assert human_task_two.task_id == human_task_one.task_id - - # EDIT: when using feature/remove-loop-reset branch of SpiffWorkflow, these should be different. 
assert human_task_two.task_id != human_task_one.task_id + def test_it_can_loopback_to_previous_bpmn_subprocess_with_gateway( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + initiator_user = self.find_or_create_user("initiator_user") + process_model = load_test_spec( + process_model_id="test_group/loopback_to_subprocess", + process_model_source_directory="loopback_to_subprocess", + ) + process_instance = self.create_process_instance_from_process_model( + process_model=process_model, user=initiator_user + ) + processor = ProcessInstanceProcessor(process_instance) + processor.do_engine_steps(save=True) + + assert len(process_instance.active_human_tasks) == 1 + assert len(process_instance.human_tasks) == 1 + human_task_one = process_instance.active_human_tasks[0] + + spiff_task = processor.get_task_by_guid(human_task_one.task_id) + ProcessInstanceService.complete_form_task(processor, spiff_task, {}, initiator_user, human_task_one) + + processor = ProcessInstanceProcessor(process_instance) + assert len(process_instance.active_human_tasks) == 1 + assert len(process_instance.human_tasks) == 2 + human_task_two = process_instance.active_human_tasks[0] + spiff_task = processor.get_task_by_guid(human_task_two.task_id) + ProcessInstanceService.complete_form_task(processor, spiff_task, {}, initiator_user, human_task_two) + + import pdb; pdb.set_trace() + # ensure this does not raise a KeyError + processor = ProcessInstanceProcessor(process_instance) + assert len(process_instance.active_human_tasks) == 1 + assert len(process_instance.human_tasks) == 3 + human_task_three = process_instance.active_human_tasks[0] + spiff_task = processor.get_task_by_guid(human_task_three.task_id) + ProcessInstanceService.complete_form_task(processor, spiff_task, {}, initiator_user, human_task_three) + def test_task_data_is_set_even_if_process_instance_errors( self, app: Flask, From 783faa7ce93797d8341ba7fec22831f4f9e38854 Mon Sep 17 00:00:00 2001 From: jasquat Date: Fri, 31 Mar 2023 10:57:13 -0400 Subject: [PATCH 144/162] some cleanup before merging to main w/ burnettk --- .../services/process_instance_processor.py | 241 ++++++++------- .../services/workflow_execution_service.py | 2 - .../integration/test_process_api.py | 1 - .../unit/test_process_instance_processor.py | 279 +++++++++--------- .../src/routes/ProcessInstanceShow.tsx | 20 +- 5 files changed, 269 insertions(+), 274 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index a5bab8af..93cd64fb 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -1,6 +1,5 @@ """Process_instance_processor.""" import _strptime # type: ignore -import copy import decimal import json import logging @@ -51,8 +50,6 @@ from SpiffWorkflow.spiff.serializer.config import SPIFF_SPEC_CONFIG # type: ign from SpiffWorkflow.task import Task as SpiffTask # type: ignore from SpiffWorkflow.task import TaskState from SpiffWorkflow.util.deep_merge import DeepMerge # type: ignore -from sqlalchemy import and_ -from sqlalchemy import or_ from spiffworkflow_backend.exceptions.api_error import ApiError from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel @@ -1266,123 +1263,123 @@ class ProcessInstanceProcessor: cls, process_instance: 
ProcessInstanceModel, to_task_guid: str, commit: Optional[bool] = False ) -> None: """Reset a process to an earlier state.""" - # raise Exception("This feature to reset a process instance to a given task is currently unavaiable") - cls.add_event_to_process_instance( - process_instance, ProcessInstanceEventType.process_instance_rewound_to_task.value, task_guid=to_task_guid - ) - - to_task_model = TaskModel.query.filter_by(guid=to_task_guid, process_instance_id=process_instance.id).first() - if to_task_model is None: - raise TaskNotFoundError( - f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'" - ) - - parent_bpmn_processes, task_models_of_parent_bpmn_processes = TaskService.task_models_of_parent_bpmn_processes( - to_task_model - ) - [p.guid for p in task_models_of_parent_bpmn_processes if p.guid] - [p.id for p in parent_bpmn_processes] - tasks_to_update_query = db.session.query(TaskModel).filter( - and_( - or_( - TaskModel.end_in_seconds > to_task_model.end_in_seconds, - TaskModel.end_in_seconds.is_(None), # type: ignore - ), - TaskModel.process_instance_id == process_instance.id, - # TaskModel.bpmn_process_id.in_(parent_bpmn_processes_ids), # type: ignore - ) - ) - tasks_to_update = tasks_to_update_query.all() - - # run all queries before making changes to task_model - if commit: - # tasks_to_delete_query = db.session.query(TaskModel).filter( - # and_( - # or_( - # TaskModel.end_in_seconds > to_task_model.end_in_seconds, - # TaskModel.end_in_seconds.is_not(None), # type: ignore - # ), - # TaskModel.process_instance_id == process_instance.id, - # TaskModel.guid.not_in(task_models_of_parent_bpmn_processes_guids), # type: ignore - # TaskModel.bpmn_process_id.not_in(parent_bpmn_processes_ids), # type: ignore - # ) - # ) - # - # tasks_to_delete = tasks_to_delete_query.all() - # - # # delete any later tasks from to_task_model and delete bpmn processes that may be - # # link directly to one of those tasks. 
- # tasks_to_delete_guids = [t.guid for t in tasks_to_delete] - # tasks_to_delete_ids = [t.id for t in tasks_to_delete] - # bpmn_processes_to_delete = BpmnProcessModel.query.filter( - # BpmnProcessModel.guid.in_(tasks_to_delete_guids) # type: ignore - # ).order_by(BpmnProcessModel.id.desc()).all() - # human_tasks_to_delete = HumanTaskModel.query.filter( - # HumanTaskModel.task_model_id.in_(tasks_to_delete_ids) # type: ignore - # ).all() - # - # - # import pdb; pdb.set_trace() - # # ensure the correct order for foreign keys - # for human_task_to_delete in human_tasks_to_delete: - # db.session.delete(human_task_to_delete) - # db.session.commit() - # for task_to_delete in tasks_to_delete: - # db.session.delete(task_to_delete) - # db.session.commit() - # for bpmn_process_to_delete in bpmn_processes_to_delete: - # db.session.delete(bpmn_process_to_delete) - # db.session.commit() - - related_human_task = HumanTaskModel.query.filter_by(task_model_id=to_task_model.id).first() - if related_human_task is not None: - db.session.delete(related_human_task) - - tasks_to_update_ids = [t.id for t in tasks_to_update] - human_tasks_to_delete = HumanTaskModel.query.filter( - HumanTaskModel.task_model_id.in_(tasks_to_update_ids) # type: ignore - ).all() - for human_task_to_delete in human_tasks_to_delete: - db.session.delete(human_task_to_delete) - db.session.commit() - - for task_to_update in tasks_to_update: - # print(f"task_to_update: {task_to_update}") - print(f"task_to_update.state: {task_to_update.state}") - TaskService.reset_task_model(task_to_update, state="FUTURE", commit=commit) - # TaskService.reset_task_model(task_to_update, state=task_to_update.state, commit=commit) - # if task_to_update.task_definition.bpmn_identifier != 'top_level_process_script_after_gate': - # TaskService.reset_task_model(task_to_update, state='FUTURE', commit=commit) - # else: - # TaskService.reset_task_model(task_to_update, state=task_to_update.state, commit=commit) - - parent_task_model = TaskModel.query.filter_by(guid=to_task_model.properties_json["parent"]).first() - if parent_task_model is None: - raise TaskNotFoundError( - f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'" - ) - - TaskService.reset_task_model( - to_task_model, - state="READY", - json_data_hash=parent_task_model.json_data_hash, - python_env_data_hash=parent_task_model.python_env_data_hash, - commit=commit, - ) - for task_model in task_models_of_parent_bpmn_processes: - TaskService.reset_task_model(task_model, state="WAITING", commit=commit) - - bpmn_process = to_task_model.bpmn_process - properties_json = copy.copy(bpmn_process.properties_json) - properties_json["last_task"] = parent_task_model.guid - bpmn_process.properties_json = properties_json - db.session.add(bpmn_process) - db.session.commit() - - if commit: - processor = ProcessInstanceProcessor(process_instance) - processor.save() - processor.suspend() + raise Exception("This feature to reset a process instance to a given task is currently unavailable") + # cls.add_event_to_process_instance( + # process_instance, ProcessInstanceEventType.process_instance_rewound_to_task.value, task_guid=to_task_guid + # ) + # + # to_task_model = TaskModel.query.filter_by(guid=to_task_guid, process_instance_id=process_instance.id).first() + # if to_task_model is None: + # raise TaskNotFoundError( + # f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'" + # ) + # + # parent_bpmn_processes, task_models_of_parent_bpmn_processes = 
TaskService.task_models_of_parent_bpmn_processes( + # to_task_model + # ) + # [p.guid for p in task_models_of_parent_bpmn_processes if p.guid] + # [p.id for p in parent_bpmn_processes] + # tasks_to_update_query = db.session.query(TaskModel).filter( + # and_( + # or_( + # TaskModel.end_in_seconds > to_task_model.end_in_seconds, + # TaskModel.end_in_seconds.is_(None), # type: ignore + # ), + # TaskModel.process_instance_id == process_instance.id, + # # TaskModel.bpmn_process_id.in_(parent_bpmn_processes_ids), # type: ignore + # ) + # ) + # tasks_to_update = tasks_to_update_query.all() + # + # # run all queries before making changes to task_model + # if commit: + # # tasks_to_delete_query = db.session.query(TaskModel).filter( + # # and_( + # # or_( + # # TaskModel.end_in_seconds > to_task_model.end_in_seconds, + # # TaskModel.end_in_seconds.is_not(None), # type: ignore + # # ), + # # TaskModel.process_instance_id == process_instance.id, + # # TaskModel.guid.not_in(task_models_of_parent_bpmn_processes_guids), # type: ignore + # # TaskModel.bpmn_process_id.not_in(parent_bpmn_processes_ids), # type: ignore + # # ) + # # ) + # # + # # tasks_to_delete = tasks_to_delete_query.all() + # # + # # # delete any later tasks from to_task_model and delete bpmn processes that may be + # # # linked directly to one of those tasks. + # # tasks_to_delete_guids = [t.guid for t in tasks_to_delete] + # # tasks_to_delete_ids = [t.id for t in tasks_to_delete] + # # bpmn_processes_to_delete = BpmnProcessModel.query.filter( + # # BpmnProcessModel.guid.in_(tasks_to_delete_guids) # type: ignore + # # ).order_by(BpmnProcessModel.id.desc()).all() + # # human_tasks_to_delete = HumanTaskModel.query.filter( + # # HumanTaskModel.task_model_id.in_(tasks_to_delete_ids) # type: ignore + # # ).all() + # # + # # + # # import pdb; pdb.set_trace() + # # # ensure the correct order for foreign keys + # # for human_task_to_delete in human_tasks_to_delete: + # # db.session.delete(human_task_to_delete) + # # db.session.commit() + # # for task_to_delete in tasks_to_delete: + # # db.session.delete(task_to_delete) + # # db.session.commit() + # # for bpmn_process_to_delete in bpmn_processes_to_delete: + # # db.session.delete(bpmn_process_to_delete) + # # db.session.commit() + # + # related_human_task = HumanTaskModel.query.filter_by(task_model_id=to_task_model.id).first() + # if related_human_task is not None: + # db.session.delete(related_human_task) + # + # tasks_to_update_ids = [t.id for t in tasks_to_update] + # human_tasks_to_delete = HumanTaskModel.query.filter( + # HumanTaskModel.task_model_id.in_(tasks_to_update_ids) # type: ignore + # ).all() + # for human_task_to_delete in human_tasks_to_delete: + # db.session.delete(human_task_to_delete) + # db.session.commit() + # + # for task_to_update in tasks_to_update: + # # print(f"task_to_update: {task_to_update}") + # print(f"task_to_update.state: {task_to_update.state}") + # TaskService.reset_task_model(task_to_update, state="FUTURE", commit=commit) + # # TaskService.reset_task_model(task_to_update, state=task_to_update.state, commit=commit) + # # if task_to_update.task_definition.bpmn_identifier != 'top_level_process_script_after_gate': + # # TaskService.reset_task_model(task_to_update, state='FUTURE', commit=commit) + # # else: + # # TaskService.reset_task_model(task_to_update, state=task_to_update.state, commit=commit) + # + # parent_task_model = TaskModel.query.filter_by(guid=to_task_model.properties_json["parent"]).first() + # if parent_task_model is None: + # raise TaskNotFoundError(
+ # f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'" + # ) + # + # TaskService.reset_task_model( + # to_task_model, + # state="READY", + # json_data_hash=parent_task_model.json_data_hash, + # python_env_data_hash=parent_task_model.python_env_data_hash, + # commit=commit, + # ) + # for task_model in task_models_of_parent_bpmn_processes: + # TaskService.reset_task_model(task_model, state="WAITING", commit=commit) + # + # bpmn_process = to_task_model.bpmn_process + # properties_json = copy.copy(bpmn_process.properties_json) + # properties_json["last_task"] = parent_task_model.guid + # bpmn_process.properties_json = properties_json + # db.session.add(bpmn_process) + # db.session.commit() + # + # if commit: + # processor = ProcessInstanceProcessor(process_instance) + # processor.save() + # processor.suspend() @staticmethod def get_parser() -> MyCustomParser: @@ -1897,9 +1894,7 @@ class ProcessInstanceProcessor: all_tasks = self.bpmn_process_instance.get_tasks(TaskState.ANY_MASK) return [t for t in all_tasks if t.state in [TaskState.WAITING, TaskState.READY]] - def get_task_by_guid( - self, task_guid: str - ) -> Optional[SpiffTask]: + def get_task_by_guid(self, task_guid: str) -> Optional[SpiffTask]: return self.bpmn_process_instance.get_task_from_id(UUID(task_guid)) @classmethod diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py index 310286e7..babff151 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py @@ -72,7 +72,6 @@ class TaskModelSavingDelegate(EngineStepDelegate): def will_complete_task(self, spiff_task: SpiffTask) -> None: if self._should_update_task_model(): self.current_task_start_in_seconds = time.time() - # import pdb; pdb.set_trace() spiff_task.task_spec._predict(spiff_task, mask=TaskState.NOT_FINISHED_MASK) if self.secondary_engine_step_delegate: self.secondary_engine_step_delegate.will_complete_task(spiff_task) @@ -110,7 +109,6 @@ class TaskModelSavingDelegate(EngineStepDelegate): # ): # self._update_task_model_with_spiff_task(waiting_spiff_task) if self.last_completed_spiff_task is not None: - # import pdb; pdb.set_trace() self.task_service.process_spiff_task_parent_subprocess_tasks(self.last_completed_spiff_task) self.task_service.process_spiff_task_children(self.last_completed_spiff_task) diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index 84d970bd..c5623f47 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -2618,7 +2618,6 @@ class TestProcessApi(BaseTest): ) assert response.status_code == 200 assert response.json is not None - import pdb; pdb.set_trace() assert response.json["status"] == "complete" response = client.get( diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index 34c71e7c..1caa952d 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ 
b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -16,7 +16,6 @@ from spiffworkflow_backend.models.group import GroupModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus from spiffworkflow_backend.models.task import TaskModel # noqa: F401 -from spiffworkflow_backend.models.task_definition import TaskDefinitionModel from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.services.authorization_service import AuthorizationService from spiffworkflow_backend.services.authorization_service import ( @@ -258,125 +257,128 @@ class TestProcessInstanceProcessor(BaseTest): assert spiff_task is not None assert spiff_task.state == TaskState.COMPLETED - def test_properly_resets_process_to_given_task( - self, - app: Flask, - client: FlaskClient, - with_db_and_bpmn_file_cleanup: None, - with_super_admin_user: UserModel, - ) -> None: - self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") - initiator_user = self.find_or_create_user("initiator_user") - finance_user_three = self.find_or_create_user("testuser3") - assert initiator_user.principal is not None - assert finance_user_three.principal is not None - AuthorizationService.import_permissions_from_yaml_file() - - finance_group = GroupModel.query.filter_by(identifier="Finance Team").first() - assert finance_group is not None - - process_model = load_test_spec( - process_model_id="test_group/manual_task", - process_model_source_directory="manual_task", - ) - process_instance = self.create_process_instance_from_process_model( - process_model=process_model, user=initiator_user - ) - processor = ProcessInstanceProcessor(process_instance) - processor.do_engine_steps(save=True) - assert len(process_instance.active_human_tasks) == 1 - initial_human_task_id = process_instance.active_human_tasks[0].id - - # save again to ensure we go attempt to process the human tasks again - processor.save() - - assert len(process_instance.active_human_tasks) == 1 - assert initial_human_task_id == process_instance.active_human_tasks[0].id - - processor = ProcessInstanceProcessor(process_instance) - human_task_one = process_instance.active_human_tasks[0] - spiff_manual_task = processor.__class__.get_task_by_bpmn_identifier( - human_task_one.task_name, processor.bpmn_process_instance - ) - assert spiff_manual_task is not None - - processor.suspend() - ProcessInstanceProcessor.reset_process(process_instance, str(spiff_manual_task.parent.id), commit=True) - - process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() - processor = ProcessInstanceProcessor(process_instance) - processor.resume() - processor.do_engine_steps(save=True) - human_task_one = process_instance.active_human_tasks[0] - spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) - ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) - assert process_instance.status == "complete" - - def test_properly_resets_process_to_given_task_with_call_activity( - self, - app: Flask, - client: FlaskClient, - with_db_and_bpmn_file_cleanup: None, - with_super_admin_user: UserModel, - ) -> None: - self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") - initiator_user = self.find_or_create_user("initiator_user") - finance_user_three = 
self.find_or_create_user("testuser3") - assert initiator_user.principal is not None - assert finance_user_three.principal is not None - AuthorizationService.import_permissions_from_yaml_file() - - finance_group = GroupModel.query.filter_by(identifier="Finance Team").first() - assert finance_group is not None - - process_model = load_test_spec( - process_model_id="test_group/manual_task_with_subprocesses", - process_model_source_directory="manual_task_with_subprocesses", - ) - process_instance = self.create_process_instance_from_process_model( - process_model=process_model, user=initiator_user - ) - processor = ProcessInstanceProcessor(process_instance) - processor.do_engine_steps(save=True) - # with open("before_reset.json", 'w') as f: f.write(json.dumps(processor.serialize(), indent=2)) - assert len(process_instance.active_human_tasks) == 1 - initial_human_task_id = process_instance.active_human_tasks[0].id - assert len(process_instance.active_human_tasks) == 1 - assert initial_human_task_id == process_instance.active_human_tasks[0].id - - human_task_one = process_instance.active_human_tasks[0] - spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) - ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) - human_task_one = process_instance.active_human_tasks[0] - spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) - ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) - - ### NOTES: - # somehow we are hosing the task state so that when completing tasks of a subprocess, the task AFTER the subprocess task - # is not marked READY but instead stays as FUTURE. Running things like: - # self.last_completed_spiff_task.task_spec._update(self.last_completed_spiff_task) - # and - # self.last_completed_spiff_task.task_spec._predict(self.last_completed_spiff_task, mask=TaskState.NOT_FINISHED_MASK) - # did not help. 
- - processor.suspend() - # import pdb; pdb.set_trace() - task_model_to_reset_to = TaskModel.query.join(TaskDefinitionModel).filter(TaskDefinitionModel.bpmn_identifier == 'top_level_subprocess_script').order_by(TaskModel.id.desc()).first() - assert task_model_to_reset_to is not None - ProcessInstanceProcessor.reset_process(process_instance, task_model_to_reset_to.guid, commit=True) - # import pdb; pdb.set_trace() - - process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() - processor = ProcessInstanceProcessor(process_instance) - processor.resume() - processor.do_engine_steps(save=True) - import pdb; pdb.set_trace() - assert len(process_instance.active_human_tasks) == 1 - human_task_one = process_instance.active_human_tasks[0] - spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) - ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) - - assert process_instance.status == "complete" + # def test_properly_resets_process_to_given_task( + # self, + # app: Flask, + # client: FlaskClient, + # with_db_and_bpmn_file_cleanup: None, + # with_super_admin_user: UserModel, + # ) -> None: + # self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") + # initiator_user = self.find_or_create_user("initiator_user") + # finance_user_three = self.find_or_create_user("testuser3") + # assert initiator_user.principal is not None + # assert finance_user_three.principal is not None + # AuthorizationService.import_permissions_from_yaml_file() + # + # finance_group = GroupModel.query.filter_by(identifier="Finance Team").first() + # assert finance_group is not None + # + # process_model = load_test_spec( + # process_model_id="test_group/manual_task", + # process_model_source_directory="manual_task", + # ) + # process_instance = self.create_process_instance_from_process_model( + # process_model=process_model, user=initiator_user + # ) + # processor = ProcessInstanceProcessor(process_instance) + # processor.do_engine_steps(save=True) + # assert len(process_instance.active_human_tasks) == 1 + # initial_human_task_id = process_instance.active_human_tasks[0].id + # + # # save again to ensure we go attempt to process the human tasks again + # processor.save() + # + # assert len(process_instance.active_human_tasks) == 1 + # assert initial_human_task_id == process_instance.active_human_tasks[0].id + # + # processor = ProcessInstanceProcessor(process_instance) + # human_task_one = process_instance.active_human_tasks[0] + # spiff_manual_task = processor.__class__.get_task_by_bpmn_identifier( + # human_task_one.task_name, processor.bpmn_process_instance + # ) + # assert spiff_manual_task is not None + # + # processor.suspend() + # ProcessInstanceProcessor.reset_process(process_instance, str(spiff_manual_task.parent.id), commit=True) + # + # process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() + # processor = ProcessInstanceProcessor(process_instance) + # processor.resume() + # processor.do_engine_steps(save=True) + # human_task_one = process_instance.active_human_tasks[0] + # spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) + # ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) + # assert process_instance.status == "complete" + # + # def test_properly_resets_process_to_given_task_with_call_activity( + # self, + # app: 
Flask, + # client: FlaskClient, + # with_db_and_bpmn_file_cleanup: None, + # with_super_admin_user: UserModel, + # ) -> None: + # self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") + # initiator_user = self.find_or_create_user("initiator_user") + # finance_user_three = self.find_or_create_user("testuser3") + # assert initiator_user.principal is not None + # assert finance_user_three.principal is not None + # AuthorizationService.import_permissions_from_yaml_file() + # + # finance_group = GroupModel.query.filter_by(identifier="Finance Team").first() + # assert finance_group is not None + # + # process_model = load_test_spec( + # process_model_id="test_group/manual_task_with_subprocesses", + # process_model_source_directory="manual_task_with_subprocesses", + # ) + # process_instance = self.create_process_instance_from_process_model( + # process_model=process_model, user=initiator_user + # ) + # processor = ProcessInstanceProcessor(process_instance) + # processor.do_engine_steps(save=True) + # # with open("before_reset.json", 'w') as f: f.write(json.dumps(processor.serialize(), indent=2)) + # assert len(process_instance.active_human_tasks) == 1 + # initial_human_task_id = process_instance.active_human_tasks[0].id + # assert len(process_instance.active_human_tasks) == 1 + # assert initial_human_task_id == process_instance.active_human_tasks[0].id + # + # human_task_one = process_instance.active_human_tasks[0] + # spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) + # ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) + # human_task_one = process_instance.active_human_tasks[0] + # spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) + # ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) + # + # # NOTES: + # # somehow we are hosing the task state so that when completing tasks of a subprocess, the task AFTER the subprocess task + # # is not marked READY but instead stays as FUTURE. Running things like: + # # self.last_completed_spiff_task.task_spec._update(self.last_completed_spiff_task) + # # and + # # self.last_completed_spiff_task.task_spec._predict(self.last_completed_spiff_task, mask=TaskState.NOT_FINISHED_MASK) + # # did not help. 
+ # + # processor.suspend() + # task_model_to_reset_to = ( + # TaskModel.query.join(TaskDefinitionModel) + # .filter(TaskDefinitionModel.bpmn_identifier == "top_level_subprocess_script") + # .order_by(TaskModel.id.desc()) # type: ignore + # .first() + # ) + # assert task_model_to_reset_to is not None + # ProcessInstanceProcessor.reset_process(process_instance, task_model_to_reset_to.guid, commit=True) + # + # process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() + # processor = ProcessInstanceProcessor(process_instance) + # processor.resume() + # processor.do_engine_steps(save=True) + # + # assert len(process_instance.active_human_tasks) == 1 + # human_task_one = process_instance.active_human_tasks[0] + # spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) + # ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) + # + # assert process_instance.status == "complete" def test_properly_saves_tasks_when_running( self, @@ -511,17 +513,18 @@ class TestProcessInstanceProcessor(BaseTest): f" {expected_task_data_key}." ) - count_failure_message = ( - f"{base_failure_message} There are more than 2 entries of this task in the db." - " There should only ever be max 2." - ) - task_models_with_bpmn_identifier_count = ( - TaskModel.query.join(TaskDefinitionModel) - .filter(TaskModel.process_instance_id == process_instance_relookup.id) - .filter(TaskDefinitionModel.bpmn_identifier == spiff_task.task_spec.name) - .count() - ) - assert task_models_with_bpmn_identifier_count < 3, count_failure_message + # TODO: add back in when removing MAYBE and LIKELY tasks + # count_failure_message = ( + # f"{base_failure_message} There are more than 2 entries of this task in the db." + # " There should only ever be max 2." 
+ # ) + # task_models_with_bpmn_identifier_count = ( + # TaskModel.query.join(TaskDefinitionModel) + # .filter(TaskModel.process_instance_id == process_instance_relookup.id) + # .filter(TaskDefinitionModel.bpmn_identifier == spiff_task.task_spec.name) + # .count() + # ) + # assert task_models_with_bpmn_identifier_count < 3, count_failure_message task_model = TaskModel.query.filter_by(guid=str(spiff_task.id)).first() assert task_model.start_in_seconds is not None @@ -582,12 +585,13 @@ class TestProcessInstanceProcessor(BaseTest): ) assert task_bpmn_identifier in spiff_tasks_checked, message - task_models_that_are_predicted_count = ( - TaskModel.query.filter(TaskModel.process_instance_id == process_instance_relookup.id) - .filter(TaskModel.state.in_(["LIKELY", "MAYBE"])) # type: ignore - .count() - ) - assert task_models_that_are_predicted_count == 0 + # TODO: add back in when removing MAYBE and LIKELY tasks + # task_models_that_are_predicted_count = ( + # TaskModel.query.filter(TaskModel.process_instance_id == process_instance_relookup.id) + # .filter(TaskModel.state.in_(["LIKELY", "MAYBE"])) # type: ignore + # .count() + # ) + # assert task_models_that_are_predicted_count == 0 assert processor.get_data() == data_set_7 @@ -729,7 +733,6 @@ class TestProcessInstanceProcessor(BaseTest): spiff_task = processor.get_task_by_guid(human_task_two.task_id) ProcessInstanceService.complete_form_task(processor, spiff_task, {}, initiator_user, human_task_two) - import pdb; pdb.set_trace() # ensure this does not raise a KeyError processor = ProcessInstanceProcessor(process_instance) assert len(process_instance.active_human_tasks) == 1 diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx index eaf90955..aab94c11 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx @@ -674,16 +674,16 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { ); }; - const canResetProcess = (task: Task) => { - // // disabling this feature for now - // return false; - return ( - ability.can('POST', targetUris.processInstanceResetPath) && - processInstance && - processInstance.status === 'suspended' && - task.state === 'READY' && - !showingActiveTask() - ); + const canResetProcess = (_task: Task) => { + // disabling this feature for now + return false; + // return ( + // ability.can('POST', targetUris.processInstanceResetPath) && + // processInstance && + // processInstance.status === 'suspended' && + // task.state === 'READY' && + // !showingActiveTask() + // ); }; const getEvents = (task: Task) => { From a1a54c54bb96a69d3614ff98851f25e0fe7b85eb Mon Sep 17 00:00:00 2001 From: jbirddog <100367399+jbirddog@users.noreply.github.com> Date: Fri, 31 Mar 2023 10:59:09 -0400 Subject: [PATCH 145/162] Trip safe asserts in tests, various process instance queue improvements (#199) --- .gitignore | 1 + .../spiffworkflow_backend/config/default.py | 8 -- .../routes/process_instances_controller.py | 29 +--- .../routes/tasks_controller.py | 25 ++-- .../services/assertion_service.py | 2 +- .../services/process_instance_processor.py | 38 ++---- .../process_instance_queue_service.py | 82 +++++------- .../services/process_instance_service.py | 8 +- .../helpers/base_test.py | 2 +- .../unit/test_process_instance_processor.py | 40 ------ .../test_process_instance_queue_service.py | 124 ++++++++++++++++++ 11 files changed, 193 insertions(+), 166 deletions(-) create mode 100644 
spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_queue_service.py diff --git a/.gitignore b/.gitignore index d391cd85..24a0ada5 100644 --- a/.gitignore +++ b/.gitignore @@ -2,3 +2,4 @@ pyrightconfig.json .idea/ t .dccache +*~ \ No newline at end of file diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py index 5c51e294..4ba0efd9 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py @@ -139,13 +139,5 @@ SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_WEB = environ.get( "SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_WEB", default="greedy" ) -SPIFFWORKFLOW_BACKEND_USER_INPUT_REQUIRED_LOCK_RETRY_TIMES = int( - environ.get("SPIFFWORKFLOW_BACKEND_USER_INPUT_REQUIRED_LOCK_RETRY_TIMES", default="3") -) - -SPIFFWORKFLOW_BACKEND_USER_INPUT_REQUIRED_LOCK_RETRY_INTERVAL_IN_SECONDS = int( - environ.get("SPIFFWORKFLOW_BACKEND_USER_INPUT_REQUIRED_LOCK_RETRY_INTERVAL_IN_SECONDS", default="1") -) - # this is only used in CI. use SPIFFWORKFLOW_BACKEND_DATABASE_URI instead for real configuration SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD = environ.get("SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD", default=None) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index e27f68a5..51c304c5 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -56,9 +56,6 @@ from spiffworkflow_backend.services.error_handling_service import ErrorHandlingS from spiffworkflow_backend.services.git_service import GitCommandError from spiffworkflow_backend.services.git_service import GitService from spiffworkflow_backend.services.message_service import MessageService -from spiffworkflow_backend.services.process_instance_lock_service import ( - ProcessInstanceLockService, -) from spiffworkflow_backend.services.process_instance_processor import ( ProcessInstanceProcessor, ) @@ -105,7 +102,6 @@ def process_instance_create( process_instance = ProcessInstanceService.create_process_instance_from_process_model_identifier( process_model_identifier, g.user ) - ProcessInstanceQueueService.enqueue(process_instance) return Response( json.dumps(ProcessInstanceModelSchema().dump(process_instance)), status=201, @@ -131,7 +127,6 @@ def process_instance_run( if do_engine_steps: try: - processor.lock_process_instance("Web") processor.do_engine_steps(save=True) except ( ApiError, @@ -150,9 +145,6 @@ def process_instance_run( status_code=400, task=task, ) from e - finally: - if ProcessInstanceLockService.has_lock(process_instance.id): - processor.unlock_process_instance("Web") if not current_app.config["SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER"]: MessageService.correlate_all_message_instances() @@ -173,14 +165,11 @@ def process_instance_terminate( processor = ProcessInstanceProcessor(process_instance) try: - processor.lock_process_instance("Web") - processor.terminate() + with ProcessInstanceQueueService.dequeued(process_instance): + processor.terminate() except (ProcessInstanceIsNotEnqueuedError, ProcessInstanceIsAlreadyLockedError) as e: ErrorHandlingService().handle_error(processor, e) raise e - finally: - if 
ProcessInstanceLockService.has_lock(process_instance.id): - processor.unlock_process_instance("Web") return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") @@ -194,14 +183,11 @@ def process_instance_suspend( processor = ProcessInstanceProcessor(process_instance) try: - processor.lock_process_instance("Web") - processor.suspend() + with ProcessInstanceQueueService.dequeued(process_instance): + processor.suspend() except (ProcessInstanceIsNotEnqueuedError, ProcessInstanceIsAlreadyLockedError) as e: ErrorHandlingService().handle_error(processor, e) raise e - finally: - if ProcessInstanceLockService.has_lock(process_instance.id): - processor.unlock_process_instance("Web") return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") @@ -215,14 +201,11 @@ def process_instance_resume( processor = ProcessInstanceProcessor(process_instance) try: - processor.lock_process_instance("Web") - processor.resume() + with ProcessInstanceQueueService.dequeued(process_instance): + processor.resume() except (ProcessInstanceIsNotEnqueuedError, ProcessInstanceIsAlreadyLockedError) as e: ErrorHandlingService().handle_error(processor, e) raise e - finally: - if ProcessInstanceLockService.has_lock(process_instance.id): - processor.unlock_process_instance("Web") return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py index 3d0eac40..7145dcce 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py @@ -56,6 +56,9 @@ from spiffworkflow_backend.services.file_system_service import FileSystemService from spiffworkflow_backend.services.process_instance_processor import ( ProcessInstanceProcessor, ) +from spiffworkflow_backend.services.process_instance_queue_service import ( + ProcessInstanceQueueService, +) from spiffworkflow_backend.services.process_instance_service import ( ProcessInstanceService, ) @@ -426,21 +429,15 @@ def task_submit_shared( only_tasks_that_can_be_completed=True, ) - retry_times = current_app.config["SPIFFWORKFLOW_BACKEND_USER_INPUT_REQUIRED_LOCK_RETRY_TIMES"] - retry_interval_in_seconds = current_app.config[ - "SPIFFWORKFLOW_BACKEND_USER_INPUT_REQUIRED_LOCK_RETRY_INTERVAL_IN_SECONDS" - ] - with sentry_sdk.start_span(op="task", description="complete_form_task"): - processor.lock_process_instance("Web", retry_times, retry_interval_in_seconds) - ProcessInstanceService.complete_form_task( - processor=processor, - spiff_task=spiff_task, - data=body, - user=g.user, - human_task=human_task, - ) - processor.unlock_process_instance("Web") + with ProcessInstanceQueueService.dequeued(process_instance): + ProcessInstanceService.complete_form_task( + processor=processor, + spiff_task=spiff_task, + data=body, + user=g.user, + human_task=human_task, + ) # If we need to update all tasks, then get the next ready task and if it a multi-instance with the same # task spec, complete that form as well. 
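
The controller changes above all follow one pattern: the explicit lock_process_instance /
unlock_process_instance pairs (with their try/finally cleanup) are replaced by the
ProcessInstanceQueueService.dequeued context manager, which re-enqueues, and thereby
unlocks, the instance even when the wrapped block raises. A minimal usage sketch, assuming
only the names introduced in this patch (the context manager itself is defined in
process_instance_queue_service.py further down):

    from spiffworkflow_backend.services.process_instance_queue_service import (
        ProcessInstanceQueueService,
    )

    # the process instance is dequeued (locked) for the duration of the block
    with ProcessInstanceQueueService.dequeued(process_instance):
        processor.terminate()
        # nesting is safe: dequeued() checks ProcessInstanceLockService.has_lock
        # first, so only the outermost block actually dequeues and re-enqueues
        with ProcessInstanceQueueService.dequeued(process_instance):
            processor.save()
    # on exit, whether by normal return or exception, the instance is re-enqueued

The SPIFFWORKFLOW_BACKEND_USER_INPUT_REQUIRED_LOCK_RETRY_TIMES setting and its retry
interval are removed in the same patch, since task submission now simply enters the
context manager instead of polling for the lock.
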
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/assertion_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/assertion_service.py index b9f7c61b..e8d534b8 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/assertion_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/assertion_service.py @@ -14,5 +14,5 @@ def safe_assertion(condition: bool) -> Generator[bool, None, None]: if not condition: sentry_sdk.capture_exception(e) current_app.logger.exception(e) - if current_app.config["ENV_IDENTIFIER"] == "local_development": + if current_app.config["ENV_IDENTIFIER"] in ["local_development", "unit_testing"]: raise e diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 93cd64fb..d2579357 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -89,7 +89,6 @@ from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.scripts.script import Script from spiffworkflow_backend.services.custom_parser import MyCustomParser from spiffworkflow_backend.services.file_system_service import FileSystemService -from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceIsAlreadyLockedError from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceQueueService from spiffworkflow_backend.services.process_model_service import ProcessModelService from spiffworkflow_backend.services.service_task_service import ServiceTaskDelegate @@ -1544,29 +1543,6 @@ class ProcessInstanceProcessor: # current_app.logger.debug(f"the_status: {the_status} for instance {self.process_instance_model.id}") return the_status - # TODO: replace with implicit/more granular locking in workflow execution service - # TODO: remove the retry logic once all user_input_required's don't need to be locked to check timers - def lock_process_instance( - self, lock_prefix: str, retry_count: int = 0, retry_interval_in_seconds: int = 0 - ) -> None: - try: - ProcessInstanceQueueService.dequeue(self.process_instance_model) - except ProcessInstanceIsAlreadyLockedError as e: - if retry_count > 0: - current_app.logger.info( - f"process_instance_id {self.process_instance_model.id} is locked. " - f"will retry {retry_count} times with delay of {retry_interval_in_seconds}." 
- ) - if retry_interval_in_seconds > 0: - time.sleep(retry_interval_in_seconds) - self.lock_process_instance(lock_prefix, retry_count - 1, retry_interval_in_seconds) - else: - raise e - - # TODO: replace with implicit/more granular locking in workflow execution service - def unlock_process_instance(self, lock_prefix: str) -> None: - ProcessInstanceQueueService.enqueue(self.process_instance_model) - def process_bpmn_messages(self) -> None: """Process_bpmn_messages.""" bpmn_messages = self.bpmn_process_instance.get_bpmn_messages() @@ -1622,6 +1598,18 @@ class ProcessInstanceProcessor: exit_at: None = None, save: bool = False, execution_strategy_name: Optional[str] = None, + ) -> None: + with ProcessInstanceQueueService.dequeued(self.process_instance_model): + # TODO: ideally we just lock in the execution service, but not sure + # about _add_bpmn_process_definitions and if that needs to happen in + # the same lock like it does on main + self._do_engine_steps(exit_at, save, execution_strategy_name) + + def _do_engine_steps( + self, + exit_at: None = None, + save: bool = False, + execution_strategy_name: Optional[str] = None, ) -> None: self._add_bpmn_process_definitions() @@ -1646,7 +1634,7 @@ class ProcessInstanceProcessor: execution_service.do_engine_steps(exit_at, save) finally: # clear out failling spiff tasks here since the ProcessInstanceProcessor creates an instance of the - # script engine on a class variable. + # script engine on a class variable. if ( hasattr(self._script_engine, "failing_spiff_task") and self._script_engine.failing_spiff_task is not None diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_queue_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_queue_service.py index 2d2bc4df..9021ab4d 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_queue_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_queue_service.py @@ -1,9 +1,9 @@ +import contextlib import time +from typing import Generator from typing import List from typing import Optional -from flask import current_app - from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus @@ -26,28 +26,32 @@ class ProcessInstanceIsAlreadyLockedError(Exception): class ProcessInstanceQueueService: """TODO: comment.""" - @staticmethod - def enqueue(process_instance: ProcessInstanceModel) -> None: - queue_item = ProcessInstanceLockService.try_unlock(process_instance.id) - - if queue_item is None: - queue_item = ProcessInstanceQueueModel(process_instance_id=process_instance.id) - + @classmethod + def _configure_and_save_queue_entry( + cls, process_instance: ProcessInstanceModel, queue_entry: ProcessInstanceQueueModel + ) -> None: # TODO: configurable params (priority/run_at) - queue_item.run_at_in_seconds = round(time.time()) - queue_item.priority = 2 - queue_item.status = process_instance.status - queue_item.locked_by = None - queue_item.locked_at_in_seconds = None + queue_entry.run_at_in_seconds = round(time.time()) + queue_entry.priority = 2 + queue_entry.status = process_instance.status + queue_entry.locked_by = None + queue_entry.locked_at_in_seconds = None - db.session.add(queue_item) + db.session.add(queue_entry) db.session.commit() - @staticmethod - def dequeue(process_instance: ProcessInstanceModel) -> None: - if 
ProcessInstanceLockService.has_lock(process_instance.id): - return + @classmethod + def enqueue_new_process_instance(cls, process_instance: ProcessInstanceModel) -> None: + queue_entry = ProcessInstanceQueueModel(process_instance_id=process_instance.id) + cls._configure_and_save_queue_entry(process_instance, queue_entry) + @classmethod + def _enqueue(cls, process_instance: ProcessInstanceModel) -> None: + queue_entry = ProcessInstanceLockService.unlock(process_instance.id) + cls._configure_and_save_queue_entry(process_instance, queue_entry) + + @classmethod + def _dequeue(cls, process_instance: ProcessInstanceModel) -> None: locked_by = ProcessInstanceLockService.locked_by() db.session.query(ProcessInstanceQueueModel).filter( @@ -82,6 +86,18 @@ class ProcessInstanceQueueService: ProcessInstanceLockService.lock(process_instance.id, queue_entry) + @classmethod + @contextlib.contextmanager + def dequeued(cls, process_instance: ProcessInstanceModel) -> Generator[None, None, None]: + reentering_lock = ProcessInstanceLockService.has_lock(process_instance.id) + try: + if not reentering_lock: + cls._dequeue(process_instance) + yield + finally: + if not reentering_lock: + cls._enqueue(process_instance) + @classmethod def entries_with_status( cls, @@ -105,31 +121,3 @@ class ProcessInstanceQueueService: queue_entries = cls.entries_with_status(status_value, None) ids_with_status = [entry.process_instance_id for entry in queue_entries] return ids_with_status - - @classmethod - def dequeue_many( - cls, - status_value: str = ProcessInstanceStatus.waiting.value, - ) -> List[int]: - locked_by = ProcessInstanceLockService.locked_by() - - # TODO: configurable params (priority/run_at/limit) - db.session.query(ProcessInstanceQueueModel).filter( - ProcessInstanceQueueModel.status == status_value, - ProcessInstanceQueueModel.locked_by.is_(None), # type: ignore - ).update( - { - "locked_by": locked_by, - } - ) - - db.session.commit() - - queue_entries = cls.entries_with_status(status_value, locked_by) - - locked_ids = ProcessInstanceLockService.lock_many(queue_entries) - - if len(locked_ids) > 0: - current_app.logger.info(f"{locked_by} dequeued_many: {locked_ids}") - - return locked_ids diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py index 0da39886..39f6de15 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py @@ -70,6 +70,7 @@ class ProcessInstanceService: ) db.session.add(process_instance_model) db.session.commit() + ProcessInstanceQueueService.enqueue_new_process_instance(process_instance_model) return process_instance_model @classmethod @@ -111,9 +112,7 @@ class ProcessInstanceService: .filter(ProcessInstanceModel.id.in_(process_instance_ids_to_check)) # type: ignore .all() ) - process_instance_lock_prefix = "Background" for process_instance in records: - locked = False processor = None try: current_app.logger.info(f"Processing process_instance {process_instance.id}") @@ -122,8 +121,6 @@ class ProcessInstanceService: current_app.logger.info(f"Optimistically skipped process_instance {process_instance.id}") continue - processor.lock_process_instance(process_instance_lock_prefix) - locked = True db.session.refresh(process_instance) if process_instance.status == status_value: execution_strategy_name = current_app.config[ @@ -142,9 +139,6 @@ 
class ProcessInstanceService: + f"({process_instance.process_model_identifier}). {str(e)}" ) current_app.logger.error(error_message) - finally: - if locked and processor: - processor.unlock_process_instance(process_instance_lock_prefix) @staticmethod def processor_to_process_instance_api( diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/base_test.py b/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/base_test.py index 6b4d0143..03620228 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/base_test.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/base_test.py @@ -304,7 +304,7 @@ class BaseTest: db.session.add(process_instance) db.session.commit() - ProcessInstanceQueueService.enqueue(process_instance) + ProcessInstanceQueueService.enqueue_new_process_instance(process_instance) return process_instance diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index 1caa952d..f4f9d538 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -24,9 +24,6 @@ from spiffworkflow_backend.services.authorization_service import ( from spiffworkflow_backend.services.process_instance_processor import ( ProcessInstanceProcessor, ) -from spiffworkflow_backend.services.process_instance_queue_service import ( - ProcessInstanceIsAlreadyLockedError, -) from spiffworkflow_backend.services.process_instance_service import ( ProcessInstanceService, ) @@ -632,43 +629,6 @@ class TestProcessInstanceProcessor(BaseTest): assert len(process_instance.active_human_tasks) == 1 assert initial_human_task_id == process_instance.active_human_tasks[0].id - # TODO: port this test to queue_service test - def xxx_test_it_can_lock_and_unlock_a_process_instance( - self, - app: Flask, - client: FlaskClient, - with_db_and_bpmn_file_cleanup: None, - ) -> None: - initiator_user = self.find_or_create_user("initiator_user") - process_model = load_test_spec( - process_model_id="test_group/model_with_lanes", - bpmn_file_name="lanes_with_owner_dict.bpmn", - process_model_source_directory="model_with_lanes", - ) - process_instance = self.create_process_instance_from_process_model( - process_model=process_model, user=initiator_user - ) - processor = ProcessInstanceProcessor(process_instance) - assert process_instance.locked_by is None - assert process_instance.locked_at_in_seconds is None - processor.lock_process_instance("TEST") - - process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() - assert process_instance.locked_by is not None - assert process_instance.locked_at_in_seconds is not None - - with pytest.raises(ProcessInstanceIsAlreadyLockedError): - processor.lock_process_instance("TEST") - - # with pytest.raises(ProcessInstanceLockedBySomethingElseError): - # processor.unlock_process_instance("TEST2") - - processor.unlock_process_instance("TEST") - - process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() - assert process_instance.locked_by is None - assert process_instance.locked_at_in_seconds is None - def test_it_can_loopback_to_previous_bpmn_task_with_gateway( self, app: Flask, diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_queue_service.py 
b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_queue_service.py new file mode 100644 index 00000000..f676479f --- /dev/null +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_queue_service.py @@ -0,0 +1,124 @@ +"""Test_process_instance_queue_service.""" +from contextlib import suppress + +from flask.app import Flask +from tests.spiffworkflow_backend.helpers.base_test import BaseTest +from tests.spiffworkflow_backend.helpers.test_data import load_test_spec + +from spiffworkflow_backend.models.process_instance import ProcessInstanceModel +from spiffworkflow_backend.services.process_instance_lock_service import ( + ProcessInstanceLockService, +) +from spiffworkflow_backend.services.process_instance_queue_service import ( + ProcessInstanceQueueService, +) + + +class TestProcessInstanceQueueService(BaseTest): + """TestProcessInstanceQueueService.""" + + def _create_process_instance(self) -> ProcessInstanceModel: + initiator_user = self.find_or_create_user("initiator_user") + process_model = load_test_spec( + process_model_id="test_group/model_with_lanes", + bpmn_file_name="lanes.bpmn", + process_model_source_directory="model_with_lanes", + ) + process_instance = self.create_process_instance_from_process_model( + process_model=process_model, user=initiator_user + ) + return process_instance + + def test_newly_created_process_instances_are_not_locked_when_added_to_the_queue( + self, + app: Flask, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + process_instance = self._create_process_instance() + assert not ProcessInstanceLockService.has_lock(process_instance.id) + queue_entries = ProcessInstanceQueueService.entries_with_status("not_started", None) + check_passed = False + for entry in queue_entries: + if entry.process_instance_id == process_instance.id: + assert entry.locked_by is None + check_passed = True + break + assert check_passed + + def test_peek_many_can_see_queue_entries_with_a_given_status( + self, + app: Flask, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + process_instance = self._create_process_instance() + queue_entry_ids = ProcessInstanceQueueService.peek_many("not_started") + assert process_instance.id in queue_entry_ids + + def test_can_run_some_code_with_a_dequeued_process_instance( + self, + app: Flask, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + process_instance = self._create_process_instance() + check_passed = False + with ProcessInstanceQueueService.dequeued(process_instance): + check_passed = True + assert check_passed + + def test_holds_a_lock_for_dequeued_process_instance( + self, + app: Flask, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + process_instance = self._create_process_instance() + assert not ProcessInstanceLockService.has_lock(process_instance.id) + with ProcessInstanceQueueService.dequeued(process_instance): + assert ProcessInstanceLockService.has_lock(process_instance.id) + assert not ProcessInstanceLockService.has_lock(process_instance.id) + + def test_unlocks_if_an_exception_is_thrown_with_a__dequeued_process_instance( + self, + app: Flask, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + process_instance = self._create_process_instance() + + with suppress(Exception): + with ProcessInstanceQueueService.dequeued(process_instance): + assert ProcessInstanceLockService.has_lock(process_instance.id) + raise Exception("just testing") + + assert not ProcessInstanceLockService.has_lock(process_instance.id) + + def test_can_call_dequeued_mulitple_times( + self, + 
app: Flask, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + process_instance = self._create_process_instance() + + with ProcessInstanceQueueService.dequeued(process_instance): + assert ProcessInstanceLockService.has_lock(process_instance.id) + + with ProcessInstanceQueueService.dequeued(process_instance): + assert ProcessInstanceLockService.has_lock(process_instance.id) + + with ProcessInstanceQueueService.dequeued(process_instance): + assert ProcessInstanceLockService.has_lock(process_instance.id) + + def test_can_nest_multiple_dequeued_calls( + self, + app: Flask, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + process_instance = self._create_process_instance() + + with ProcessInstanceQueueService.dequeued(process_instance): + with ProcessInstanceQueueService.dequeued(process_instance): + with ProcessInstanceQueueService.dequeued(process_instance): + assert ProcessInstanceLockService.has_lock(process_instance.id) + + assert ProcessInstanceLockService.has_lock(process_instance.id) + assert ProcessInstanceLockService.has_lock(process_instance.id) + + assert not ProcessInstanceLockService.has_lock(process_instance.id) From 44553cb651118312f4ea181f235ff5a19b62e712 Mon Sep 17 00:00:00 2001 From: jasquat Date: Fri, 31 Mar 2023 12:45:14 -0400 Subject: [PATCH 146/162] added ability to save a form as draft w/ burnettk --- .../src/spiffworkflow_backend/api.yml | 4 +- .../routes/tasks_controller.py | 100 ++++++++++-------- .../services/process_instance_processor.py | 10 +- .../services/task_service.py | 22 ++-- .../src/routes/TaskShow.tsx | 54 ++++++++-- 5 files changed, 123 insertions(+), 67 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml index 06f482bc..373f1831 100755 --- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml @@ -1832,10 +1832,10 @@ paths: description: The unique id of an existing process instance. schema: type: integer - - name: terminate_loop + - name: save_as_draft in: query required: false - description: Terminate the loop on a looping task + description: Save the data to task but do not complete it. 
schema: type: boolean get: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py index 7145dcce..7c8973aa 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py @@ -180,13 +180,7 @@ def task_data_show( process_instance_id: int, task_guid: str, ) -> flask.wrappers.Response: - task_model = TaskModel.query.filter_by(guid=task_guid, process_instance_id=process_instance_id).first() - if task_model is None: - raise ApiError( - error_code="task_not_found", - message=f"Cannot find a task with guid '{task_guid}' for process instance '{process_instance_id}'", - status_code=400, - ) + task_model = _get_task_model_from_guid_or_raise(task_guid, process_instance_id) task_model.data = task_model.json_data() return make_response(jsonify(task_model), 200) @@ -216,13 +210,11 @@ def task_data_update( if "new_task_data" in body: new_task_data_str: str = body["new_task_data"] new_task_data_dict = json.loads(new_task_data_str) - json_data_dict = TaskService.update_task_data_on_task_model( + json_data_dict = TaskService.update_task_data_on_task_model_and_return_dict_if_updated( task_model, new_task_data_dict, "json_data_hash" ) if json_data_dict is not None: TaskService.insert_or_update_json_data_records({json_data_dict["hash"]: json_data_dict}) - # json_data = JsonDataModel(**json_data_dict) - # db.session.add(json_data) ProcessInstanceProcessor.add_event_to_process_instance( process_instance, ProcessInstanceEventType.task_data_edited.value, task_guid=task_guid ) @@ -389,11 +381,11 @@ def process_data_show( ) -def task_submit_shared( +def _task_submit_shared( process_instance_id: int, task_guid: str, body: Dict[str, Any], - terminate_loop: bool = False, + save_as_draft: bool = False, ) -> flask.wrappers.Response: principal = _find_principal_or_raise() process_instance = _find_process_instance_by_id_or_raise(process_instance_id) @@ -420,25 +412,10 @@ def task_submit_shared( ) ) - if terminate_loop and spiff_task.is_looping(): - spiff_task.terminate_loop() - - human_task = _find_human_task_or_raise( - process_instance_id=process_instance_id, - task_guid=task_guid, - only_tasks_that_can_be_completed=True, - ) - - with sentry_sdk.start_span(op="task", description="complete_form_task"): - with ProcessInstanceQueueService.dequeued(process_instance): - ProcessInstanceService.complete_form_task( - processor=processor, - spiff_task=spiff_task, - data=body, - user=g.user, - human_task=human_task, - ) - + # multi-instance code from crconnect - we may need it or may not + # if terminate_loop and spiff_task.is_looping(): + # spiff_task.terminate_loop() + # # If we need to update all tasks, then get the next ready task and if it a multi-instance with the same # task spec, complete that form as well. 
# if update_all: @@ -449,15 +426,41 @@ def task_submit_shared( # last_index = next_task.task_info()["mi_index"] # next_task = processor.next_task() - next_human_task_assigned_to_me = ( - HumanTaskModel.query.filter_by(process_instance_id=process_instance_id, completed=False) - .order_by(asc(HumanTaskModel.id)) # type: ignore - .join(HumanTaskUserModel) - .filter_by(user_id=principal.user_id) - .first() - ) - if next_human_task_assigned_to_me: - return make_response(jsonify(HumanTaskModel.to_task(next_human_task_assigned_to_me)), 200) + if save_as_draft: + task_model = _get_task_model_from_guid_or_raise(task_guid, process_instance_id) + json_data_dict = TaskService.update_task_data_on_task_model_and_return_dict_if_updated( + task_model, body, "json_data_hash" + ) + if json_data_dict is not None: + TaskService.insert_or_update_json_data_dict(json_data_dict) + db.session.add(task_model) + db.session.commit() + else: + human_task = _find_human_task_or_raise( + process_instance_id=process_instance_id, + task_guid=task_guid, + only_tasks_that_can_be_completed=True, + ) + + with sentry_sdk.start_span(op="task", description="complete_form_task"): + with ProcessInstanceQueueService.dequeued(process_instance): + ProcessInstanceService.complete_form_task( + processor=processor, + spiff_task=spiff_task, + data=body, + user=g.user, + human_task=human_task, + ) + + next_human_task_assigned_to_me = ( + HumanTaskModel.query.filter_by(process_instance_id=process_instance_id, completed=False) + .order_by(asc(HumanTaskModel.id)) # type: ignore + .join(HumanTaskUserModel) + .filter_by(user_id=principal.user_id) + .first() + ) + if next_human_task_assigned_to_me: + return make_response(jsonify(HumanTaskModel.to_task(next_human_task_assigned_to_me)), 200) return Response(json.dumps({"ok": True}), status=202, mimetype="application/json") @@ -466,11 +469,11 @@ def task_submit( process_instance_id: int, task_guid: str, body: Dict[str, Any], - terminate_loop: bool = False, + save_as_draft: bool = False, ) -> flask.wrappers.Response: """Task_submit_user_data.""" with sentry_sdk.start_span(op="controller_action", description="tasks_controller.task_submit"): - return task_submit_shared(process_instance_id, task_guid, body, terminate_loop) + return _task_submit_shared(process_instance_id, task_guid, body, save_as_draft) def _get_tasks( @@ -764,3 +767,16 @@ def _munge_form_ui_schema_based_on_hidden_fields_in_task_data(task: Task) -> Non relevant_depth_of_ui_schema = relevant_depth_of_ui_schema[hidden_field_part] if len(hidden_field_parts) == ii + 1: relevant_depth_of_ui_schema["ui:widget"] = "hidden" + + +def _get_task_model_from_guid_or_raise(task_guid: str, process_instance_id: int) -> TaskModel: + task_model: Optional[TaskModel] = TaskModel.query.filter_by( + guid=task_guid, process_instance_id=process_instance_id + ).first() + if task_model is None: + raise ApiError( + error_code="task_not_found", + message=f"Cannot find a task with guid '{task_guid}' for process instance '{process_instance_id}'", + status_code=400, + ) + return task_model diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index d2579357..5505f635 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -93,6 +93,7 @@ from 
spiffworkflow_backend.services.process_instance_queue_service import Proces from spiffworkflow_backend.services.process_model_service import ProcessModelService from spiffworkflow_backend.services.service_task_service import ServiceTaskDelegate from spiffworkflow_backend.services.spec_file_service import SpecFileService +from spiffworkflow_backend.services.task_service import JsonDataDict from spiffworkflow_backend.services.task_service import TaskService from spiffworkflow_backend.services.user_service import UserService from spiffworkflow_backend.services.workflow_execution_service import ( @@ -1790,12 +1791,9 @@ class ProcessInstanceProcessor: db.session.add(human_task) json_data_dict_list = TaskService.update_task_model(task_model, spiff_task, self._serializer) - for json_data_dict in json_data_dict_list: - if json_data_dict is not None: - json_data = db.session.query(JsonDataModel.id).filter_by(hash=json_data_dict["hash"]).first() - if json_data is None: - json_data = JsonDataModel(**json_data_dict) - db.session.add(json_data) + json_data_dict_mapping: dict[str, JsonDataDict] = {} + TaskService.update_json_data_dicts_using_list(json_data_dict_list, json_data_dict_mapping) + TaskService.insert_or_update_json_data_records(json_data_dict_mapping) self.add_event_to_process_instance( self.process_instance_model, diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index e9839fa7..b89c0bfb 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -130,7 +130,7 @@ class TaskService: self.task_models[task_model.guid] = task_model if bpmn_process_json_data is not None: json_data_dict_list.append(bpmn_process_json_data) - self._update_json_data_dicts_using_list(json_data_dict_list, self.json_data_dicts) + self.update_json_data_dicts_using_list(json_data_dict_list, self.json_data_dicts) if task_model.state == "COMPLETED" or task_failed: event_type = ProcessInstanceEventType.task_completed.value @@ -207,8 +207,12 @@ class TaskService: python_env_data_dict = cls._get_python_env_data_dict_from_spiff_task(spiff_task, serializer) task_model.properties_json = new_properties_json task_model.state = TaskStateNames[new_properties_json["state"]] - json_data_dict = cls.update_task_data_on_task_model(task_model, spiff_task_data, "json_data_hash") - python_env_dict = cls.update_task_data_on_task_model(task_model, python_env_data_dict, "python_env_data_hash") + json_data_dict = cls.update_task_data_on_task_model_and_return_dict_if_updated( + task_model, spiff_task_data, "json_data_hash" + ) + python_env_dict = cls.update_task_data_on_task_model_and_return_dict_if_updated( + task_model, python_env_data_dict, "python_env_data_hash" + ) return [json_data_dict, python_env_dict] @classmethod @@ -446,7 +450,11 @@ class TaskService: return json_data_dict @classmethod - def update_task_data_on_task_model( + def insert_or_update_json_data_dict(cls, json_data_dict: JsonDataDict) -> None: + TaskService.insert_or_update_json_data_records({json_data_dict["hash"]: json_data_dict}) + + @classmethod + def update_task_data_on_task_model_and_return_dict_if_updated( cls, task_model: TaskModel, task_data_dict: dict, task_model_data_column: str ) -> Optional[JsonDataDict]: task_data_json = json.dumps(task_data_dict, sort_keys=True) @@ -501,11 +509,11 @@ class TaskService: python_env_data_hash: Optional[str] = None, 
    ) -> None:
         if json_data_hash is None:
-            cls.update_task_data_on_task_model(task_model, {}, "json_data_hash")
+            cls.update_task_data_on_task_model_and_return_dict_if_updated(task_model, {}, "json_data_hash")
         else:
             task_model.json_data_hash = json_data_hash
         if python_env_data_hash is None:
-            cls.update_task_data_on_task_model(task_model, {}, "python_env_data")
+            cls.update_task_data_on_task_model_and_return_dict_if_updated(task_model, {}, "python_env_data")
         else:
             task_model.python_env_data_hash = python_env_data_hash

@@ -556,7 +564,7 @@ class TaskService:
         return converted_data

     @classmethod
-    def _update_json_data_dicts_using_list(
+    def update_json_data_dicts_using_list(
         cls, json_data_dict_list: list[Optional[JsonDataDict]], json_data_dicts: dict[str, JsonDataDict]
     ) -> None:
         for json_data_dict in json_data_dict_list:
diff --git a/spiffworkflow-frontend/src/routes/TaskShow.tsx b/spiffworkflow-frontend/src/routes/TaskShow.tsx
index 058ee0b5..5362cf32 100644
--- a/spiffworkflow-frontend/src/routes/TaskShow.tsx
+++ b/spiffworkflow-frontend/src/routes/TaskShow.tsx
@@ -1,4 +1,4 @@
-import { useEffect, useState } from 'react';
+import React, { useEffect, useState } from 'react';
 import { useNavigate, useParams } from 'react-router-dom';

 import validator from '@rjsf/validator-ajv8';
@@ -9,6 +9,7 @@ import {
   Grid,
   Column,
   Button,
+  ButtonSet,
   // @ts-ignore
 } from '@carbon/react';

@@ -21,6 +22,13 @@ import { modifyProcessIdentifierForPathParam } from '../helpers';
 import { ProcessInstanceTask } from '../interfaces';
 import ProcessBreadcrumb from '../components/ProcessBreadcrumb';

+class UnexpectedHumanTaskType extends Error {
+  constructor(message: string) {
+    super(message);
+    this.name = 'UnexpectedHumanTaskType';
+  }
+}
+
 export default function TaskShow() {
   const [task, setTask] = useState(null);
   const [userTasks] = useState(null);
@@ -30,6 +38,9 @@ export default function TaskShow() {

   const { addError, removeError } = useAPIError();

+  // eslint-disable-next-line sonarjs/no-duplicate-string
+  const supportedHumanTaskTypes = ['User Task', 'Manual Task'];
+
   useEffect(() => {
     const processResult = (result: ProcessInstanceTask) => {
       setTask(result);
@@ -76,16 +87,22 @@ export default function TaskShow() {
     }
   };

-  const handleFormSubmit = (event: any) => {
+  const handleFormSubmit = (formObject: any, event: any) => {
     if (disabled) {
       return;
     }
+    const submitButtonId = event.nativeEvent.submitter.id;
+    let queryParams = '';
+    console.log('submitButtonId', submitButtonId);
+    if (submitButtonId === 'save-as-draft-button') {
+      queryParams = '?save_as_draft=true';
+    }
     setDisabled(true);
     removeError();
-    const dataToSubmit = event.formData;
+    const dataToSubmit = formObject.formData;
     delete dataToSubmit.isManualTask;
     HttpService.makeCallToBackend({
-      path: `/tasks/${params.process_instance_id}/${params.task_id}`,
+      path: `/tasks/${params.process_instance_id}/${params.task_id}${queryParams}`,
       successCallback: processSubmitResult,
       failureCallback: (error: any) => {
         addError(error);
@@ -226,16 +243,33 @@ export default function TaskShow() {
   }

   if (task.state === 'READY') {
-    let buttonText = 'Submit';
+    let submitButtonText = 'Submit';
+    let saveAsDraftButton = null;
     if (task.type === 'Manual Task') {
-      buttonText = 'Continue';
+      submitButtonText = 'Continue';
+    } else if (task.type === 'User Task') {
+      saveAsDraftButton = (
+        <Button id="save-as-draft-button" disabled={disabled} type="submit">
+          Save as Draft
+        </Button>
+      );
+    } else {
+      throw new UnexpectedHumanTaskType(
+        `Invalid task type given: ${task.type}. Only supported types: ${supportedHumanTaskTypes}`
+      );
     }
     reactFragmentToHideSubmitButton = (
-      <div>
-        <Button type="submit" disabled={disabled}>
-          {buttonText}
-        </Button>
-      </div>
+      <ButtonSet>
+        <Button type="submit" id="submit-button" disabled={disabled}>
+          {submitButtonText}
+        </Button>
+        {saveAsDraftButton}
+      </ButtonSet>
     );
   }

From 653d15acf378c582c4c9e9dc9f10e56b4bc73d98 Mon Sep 17 00:00:00 2001
From: Dan
Date: Fri, 31 Mar 2023 14:56:29 -0400
Subject: [PATCH 147/162] Zoom to fit viewport when opening a diagram.

---
 spiffworkflow-frontend/src/components/ReactDiagramEditor.tsx | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/spiffworkflow-frontend/src/components/ReactDiagramEditor.tsx b/spiffworkflow-frontend/src/components/ReactDiagramEditor.tsx
index e3989c63..eefaff82 100644
--- a/spiffworkflow-frontend/src/components/ReactDiagramEditor.tsx
+++ b/spiffworkflow-frontend/src/components/ReactDiagramEditor.tsx
@@ -450,7 +450,10 @@ export default function ReactDiagramEditor({
     if (alreadyImportedXmlRef.current) {
       return;
     }
-    diagramModelerToUse.importXML(diagramXMLToDisplay);
+    diagramModelerToUse.importXML(diagramXMLToDisplay).then(() => {
+      diagramModelerToUse.get('canvas').zoom('fit-viewport');
+    });
+
     alreadyImportedXmlRef.current = true;
   }

From b943d90d92da74f9c5849fde5614286027e80f58 Mon Sep 17 00:00:00 2001
From: jasquat
Date: Fri, 31 Mar 2023 15:14:25 -0400
Subject: [PATCH 148/162] rewind test passed w/ burnettk

---
 spiffworkflow-backend/poetry.lock             |   6 +-
 spiffworkflow-backend/pyproject.toml          |   2 +-
 .../services/process_instance_processor.py    | 246 +++++++++--------
 .../services/task_service.py                  |  14 +-
 .../unit/test_process_instance_processor.py   | 251 +++++++++---------
 5 files changed, 269 insertions(+), 250 deletions(-)

diff --git a/spiffworkflow-backend/poetry.lock b/spiffworkflow-backend/poetry.lock
index 356529d2..3e0f4694 100644
--- a/spiffworkflow-backend/poetry.lock
+++ b/spiffworkflow-backend/poetry.lock
@@ -1889,8 +1889,8 @@ lxml = "*"
 [package.source]
 type = "git"
 url = "https://github.com/sartography/SpiffWorkflow"
-reference = "main"
-resolved_reference = "62454c99c3a711c38f4249a3b5e7215d42037d72"
+reference = "bugfix/execute-event-gateways-on-ready"
+resolved_reference = "a1795209b415037630a44522fc7cc9d6e70e50d6"

 [[package]]
 name = "sqlalchemy"
@@ -2273,7 +2273,7 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more

 [metadata]
 lock-version = "1.1"
 python-versions = ">=3.9,<3.12"
-content-hash = "9fea44386fbab29102a051a254058909568c4ee3dbd6a402fb91aacbcf1f7fd2"
+content-hash = "2dc5b510dcd40c461934921401b09ce6cf9f49ddb440192e819556fbbc6cdbfc"

 [metadata.files]
 alabaster = [
diff --git a/spiffworkflow-backend/pyproject.toml b/spiffworkflow-backend/pyproject.toml
index df2495e0..6bdb5b0f 100644
--- a/spiffworkflow-backend/pyproject.toml
+++ b/spiffworkflow-backend/pyproject.toml
@@ -27,7 +27,7 @@ flask-marshmallow = "*"
 flask-migrate = "*"
 flask-restful = "*"
 werkzeug = "*"
-SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"}
+SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "bugfix/execute-event-gateways-on-ready"}
 # SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "6cad2981712bb61eca23af1adfafce02d3277cb9"}
 # SpiffWorkflow = {develop = true, path = "../SpiffWorkflow" }
 sentry-sdk = "^1.10"
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
index 5505f635..c0412512 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
@@ -1,5 +1,8 @@
"""Process_instance_processor.""" import _strptime # type: ignore +import copy +from sqlalchemy import or_ +from sqlalchemy import and_ import decimal import json import logging @@ -741,6 +744,9 @@ class ProcessInstanceProcessor: spec, subprocesses ) bpmn_process_instance.data[ProcessInstanceProcessor.VALIDATION_PROCESS_KEY] = validate_only + + # run _predict to ensure tasks are predicted to add back in LIKELY and MAYBE tasks + bpmn_process_instance._predict() return ( bpmn_process_instance, full_bpmn_process_dict, @@ -1263,123 +1269,129 @@ class ProcessInstanceProcessor: cls, process_instance: ProcessInstanceModel, to_task_guid: str, commit: Optional[bool] = False ) -> None: """Reset a process to an earlier state.""" - raise Exception("This feature to reset a process instance to a given task is currently unavaiable") - # cls.add_event_to_process_instance( - # process_instance, ProcessInstanceEventType.process_instance_rewound_to_task.value, task_guid=to_task_guid - # ) - # - # to_task_model = TaskModel.query.filter_by(guid=to_task_guid, process_instance_id=process_instance.id).first() - # if to_task_model is None: - # raise TaskNotFoundError( - # f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'" - # ) - # - # parent_bpmn_processes, task_models_of_parent_bpmn_processes = TaskService.task_models_of_parent_bpmn_processes( - # to_task_model - # ) - # [p.guid for p in task_models_of_parent_bpmn_processes if p.guid] - # [p.id for p in parent_bpmn_processes] - # tasks_to_update_query = db.session.query(TaskModel).filter( - # and_( - # or_( - # TaskModel.end_in_seconds > to_task_model.end_in_seconds, - # TaskModel.end_in_seconds.is_(None), # type: ignore - # ), - # TaskModel.process_instance_id == process_instance.id, - # # TaskModel.bpmn_process_id.in_(parent_bpmn_processes_ids), # type: ignore - # ) - # ) - # tasks_to_update = tasks_to_update_query.all() - # - # # run all queries before making changes to task_model - # if commit: - # # tasks_to_delete_query = db.session.query(TaskModel).filter( - # # and_( - # # or_( - # # TaskModel.end_in_seconds > to_task_model.end_in_seconds, - # # TaskModel.end_in_seconds.is_not(None), # type: ignore - # # ), - # # TaskModel.process_instance_id == process_instance.id, - # # TaskModel.guid.not_in(task_models_of_parent_bpmn_processes_guids), # type: ignore - # # TaskModel.bpmn_process_id.not_in(parent_bpmn_processes_ids), # type: ignore - # # ) - # # ) - # # - # # tasks_to_delete = tasks_to_delete_query.all() - # # - # # # delete any later tasks from to_task_model and delete bpmn processes that may be - # # # link directly to one of those tasks. 
- # # tasks_to_delete_guids = [t.guid for t in tasks_to_delete] - # # tasks_to_delete_ids = [t.id for t in tasks_to_delete] - # # bpmn_processes_to_delete = BpmnProcessModel.query.filter( - # # BpmnProcessModel.guid.in_(tasks_to_delete_guids) # type: ignore - # # ).order_by(BpmnProcessModel.id.desc()).all() - # # human_tasks_to_delete = HumanTaskModel.query.filter( - # # HumanTaskModel.task_model_id.in_(tasks_to_delete_ids) # type: ignore - # # ).all() - # # - # # - # # import pdb; pdb.set_trace() - # # # ensure the correct order for foreign keys - # # for human_task_to_delete in human_tasks_to_delete: - # # db.session.delete(human_task_to_delete) - # # db.session.commit() - # # for task_to_delete in tasks_to_delete: - # # db.session.delete(task_to_delete) - # # db.session.commit() - # # for bpmn_process_to_delete in bpmn_processes_to_delete: - # # db.session.delete(bpmn_process_to_delete) - # # db.session.commit() - # - # related_human_task = HumanTaskModel.query.filter_by(task_model_id=to_task_model.id).first() - # if related_human_task is not None: - # db.session.delete(related_human_task) - # - # tasks_to_update_ids = [t.id for t in tasks_to_update] - # human_tasks_to_delete = HumanTaskModel.query.filter( - # HumanTaskModel.task_model_id.in_(tasks_to_update_ids) # type: ignore - # ).all() - # for human_task_to_delete in human_tasks_to_delete: - # db.session.delete(human_task_to_delete) - # db.session.commit() - # - # for task_to_update in tasks_to_update: - # # print(f"task_to_update: {task_to_update}") - # print(f"task_to_update.state: {task_to_update.state}") - # TaskService.reset_task_model(task_to_update, state="FUTURE", commit=commit) - # # TaskService.reset_task_model(task_to_update, state=task_to_update.state, commit=commit) - # # if task_to_update.task_definition.bpmn_identifier != 'top_level_process_script_after_gate': - # # TaskService.reset_task_model(task_to_update, state='FUTURE', commit=commit) - # # else: - # # TaskService.reset_task_model(task_to_update, state=task_to_update.state, commit=commit) - # - # parent_task_model = TaskModel.query.filter_by(guid=to_task_model.properties_json["parent"]).first() - # if parent_task_model is None: - # raise TaskNotFoundError( - # f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'" - # ) - # - # TaskService.reset_task_model( - # to_task_model, - # state="READY", - # json_data_hash=parent_task_model.json_data_hash, - # python_env_data_hash=parent_task_model.python_env_data_hash, - # commit=commit, - # ) - # for task_model in task_models_of_parent_bpmn_processes: - # TaskService.reset_task_model(task_model, state="WAITING", commit=commit) - # - # bpmn_process = to_task_model.bpmn_process - # properties_json = copy.copy(bpmn_process.properties_json) - # properties_json["last_task"] = parent_task_model.guid - # bpmn_process.properties_json = properties_json - # db.session.add(bpmn_process) - # db.session.commit() - # - # if commit: - # processor = ProcessInstanceProcessor(process_instance) - # processor.save() - # processor.suspend() + # raise Exception("This feature to reset a process instance to a given task is currently unavaiable") + cls.add_event_to_process_instance( + process_instance, ProcessInstanceEventType.process_instance_rewound_to_task.value, task_guid=to_task_guid + ) + + to_task_model = TaskModel.query.filter_by(guid=to_task_guid, process_instance_id=process_instance.id).first() + if to_task_model is None: + raise TaskNotFoundError( + f"Cannot find a task with guid 
'{to_task_guid}' for process instance '{process_instance.id}'" + ) + + parent_bpmn_processes, task_models_of_parent_bpmn_processes = TaskService.task_models_of_parent_bpmn_processes( + to_task_model + ) + task_models_of_parent_bpmn_processes_guids = [p.guid for p in task_models_of_parent_bpmn_processes if p.guid] + parent_bpmn_processes_ids = [p.id for p in parent_bpmn_processes] + tasks_to_update_query = db.session.query(TaskModel).filter( + and_( + or_( + TaskModel.end_in_seconds > to_task_model.end_in_seconds, + TaskModel.end_in_seconds.is_(None), # type: ignore + ), + TaskModel.process_instance_id == process_instance.id, + TaskModel.bpmn_process_id.in_(parent_bpmn_processes_ids), # type: ignore + ) + ) + tasks_to_update = tasks_to_update_query.all() + tasks_to_update_guids = [t.guid for t in tasks_to_update] + bpmn_processes_to_update_query = db.session.query(BpmnProcessModel).filter( + and_( + BpmnProcessModel.guid.in_(tasks_to_update_guids), # type: ignore + BpmnProcessModel.id.not_in(parent_bpmn_processes_ids), # type: ignore + ) + ) + bpmn_processes_to_update = bpmn_processes_to_update_query.all() + + # run all queries before making changes to task_model + if commit: + tasks_to_delete_query = db.session.query(TaskModel).filter( + and_( + or_( + TaskModel.end_in_seconds > to_task_model.end_in_seconds, + TaskModel.end_in_seconds.is_not(None), # type: ignore + ), + TaskModel.process_instance_id == process_instance.id, + TaskModel.guid.not_in(task_models_of_parent_bpmn_processes_guids), # type: ignore + TaskModel.bpmn_process_id.not_in(parent_bpmn_processes_ids), # type: ignore + ) + ) + + tasks_to_delete = tasks_to_delete_query.all() + + # delete any later tasks from to_task_model and delete bpmn processes that may be + # link directly to one of those tasks. 
+ tasks_to_delete_guids = [t.guid for t in tasks_to_delete] + tasks_to_delete_ids = [t.id for t in tasks_to_delete] + bpmn_processes_to_delete = BpmnProcessModel.query.filter( + BpmnProcessModel.guid.in_(tasks_to_delete_guids) # type: ignore + ).order_by(BpmnProcessModel.id.desc()).all() + human_tasks_to_delete = HumanTaskModel.query.filter( + HumanTaskModel.task_model_id.in_(tasks_to_delete_ids) # type: ignore + ).all() + + + # import pdb; pdb.set_trace() + # ensure the correct order for foreign keys + for human_task_to_delete in human_tasks_to_delete: + db.session.delete(human_task_to_delete) + db.session.commit() + for task_to_delete in tasks_to_delete: + db.session.delete(task_to_delete) + db.session.commit() + for bpmn_process_to_delete in bpmn_processes_to_delete: + db.session.delete(bpmn_process_to_delete) + db.session.commit() + + related_human_task = HumanTaskModel.query.filter_by(task_model_id=to_task_model.id).first() + if related_human_task is not None: + db.session.delete(related_human_task) + + tasks_to_update_ids = [t.id for t in tasks_to_update] + human_tasks_to_delete = HumanTaskModel.query.filter( + HumanTaskModel.task_model_id.in_(tasks_to_update_ids) # type: ignore + ).all() + for human_task_to_delete in human_tasks_to_delete: + db.session.delete(human_task_to_delete) + db.session.commit() + + for task_to_update in tasks_to_update: + print(f"task_to_update.state: {task_to_update.state}") + TaskService.reset_task_model(task_to_update, state="FUTURE", commit=commit) + + for bpmn_process_to_update in bpmn_processes_to_update: + db.session.delete(bpmn_process_to_update) + + parent_task_model = TaskModel.query.filter_by(guid=to_task_model.properties_json["parent"]).first() + if parent_task_model is None: + raise TaskNotFoundError( + f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'" + ) + + TaskService.reset_task_model( + to_task_model, + state="READY", + json_data_hash=parent_task_model.json_data_hash, + python_env_data_hash=parent_task_model.python_env_data_hash, + commit=commit, + ) + for task_model in task_models_of_parent_bpmn_processes: + TaskService.reset_task_model(task_model, state="WAITING", commit=commit) + + bpmn_process = to_task_model.bpmn_process + properties_json = copy.copy(bpmn_process.properties_json) + properties_json["last_task"] = parent_task_model.guid + bpmn_process.properties_json = properties_json + db.session.add(bpmn_process) + db.session.commit() + + import pdb; pdb.set_trace() + if commit: + processor = ProcessInstanceProcessor(process_instance) + processor.save() + processor.suspend() @staticmethod def get_parser() -> MyCustomParser: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index b89c0bfb..b6ca0c73 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -68,9 +68,9 @@ class TaskService: spiff_task: SpiffTask, ) -> None: for child_spiff_task in spiff_task.children: - # if child_spiff_task._has_state(TaskState.PREDICTED_MASK): - # self.__class__.remove_spiff_task_from_parent(child_spiff_task, self.task_models) - # continue + if child_spiff_task._has_state(TaskState.PREDICTED_MASK): + self.__class__.remove_spiff_task_from_parent(child_spiff_task, self.task_models) + continue self.update_task_model_with_spiff_task( spiff_task=child_spiff_task, ) @@ -157,7 +157,7 @@ class 
TaskService: bpmn_process: BpmnProcessModel, ) -> None: new_properties_json = copy.copy(bpmn_process.properties_json) - new_properties_json["last_task"] = str(spiff_workflow.last_task) if spiff_workflow.last_task else None + new_properties_json["last_task"] = str(spiff_workflow.last_task.id) if spiff_workflow.last_task else None new_properties_json["success"] = spiff_workflow.success bpmn_process.properties_json = new_properties_json @@ -403,9 +403,9 @@ class TaskService: # we are going to avoid saving likely and maybe tasks to the db. # that means we need to remove them from their parents' lists of children as well. spiff_task = spiff_workflow.get_task_from_id(UUID(task_id)) - # if spiff_task._has_state(TaskState.PREDICTED_MASK): - # cls.remove_spiff_task_from_parent(spiff_task, new_task_models) - # continue + if spiff_task._has_state(TaskState.PREDICTED_MASK): + cls.remove_spiff_task_from_parent(spiff_task, new_task_models) + continue task_model = TaskModel.query.filter_by(guid=task_id).first() if task_model is None: diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index f4f9d538..6580780b 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -1,5 +1,6 @@ """Test_process_instance_processor.""" from uuid import UUID +from spiffworkflow_backend.models.task_definition import TaskDefinitionModel import pytest from flask import g @@ -254,128 +255,134 @@ class TestProcessInstanceProcessor(BaseTest): assert spiff_task is not None assert spiff_task.state == TaskState.COMPLETED - # def test_properly_resets_process_to_given_task( - # self, - # app: Flask, - # client: FlaskClient, - # with_db_and_bpmn_file_cleanup: None, - # with_super_admin_user: UserModel, - # ) -> None: - # self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") - # initiator_user = self.find_or_create_user("initiator_user") - # finance_user_three = self.find_or_create_user("testuser3") - # assert initiator_user.principal is not None - # assert finance_user_three.principal is not None - # AuthorizationService.import_permissions_from_yaml_file() - # - # finance_group = GroupModel.query.filter_by(identifier="Finance Team").first() - # assert finance_group is not None - # - # process_model = load_test_spec( - # process_model_id="test_group/manual_task", - # process_model_source_directory="manual_task", - # ) - # process_instance = self.create_process_instance_from_process_model( - # process_model=process_model, user=initiator_user - # ) - # processor = ProcessInstanceProcessor(process_instance) - # processor.do_engine_steps(save=True) - # assert len(process_instance.active_human_tasks) == 1 - # initial_human_task_id = process_instance.active_human_tasks[0].id - # - # # save again to ensure we go attempt to process the human tasks again - # processor.save() - # - # assert len(process_instance.active_human_tasks) == 1 - # assert initial_human_task_id == process_instance.active_human_tasks[0].id - # - # processor = ProcessInstanceProcessor(process_instance) - # human_task_one = process_instance.active_human_tasks[0] - # spiff_manual_task = processor.__class__.get_task_by_bpmn_identifier( - # human_task_one.task_name, processor.bpmn_process_instance - # ) - # assert spiff_manual_task is not None - # - # 
processor.suspend() - # ProcessInstanceProcessor.reset_process(process_instance, str(spiff_manual_task.parent.id), commit=True) - # - # process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() - # processor = ProcessInstanceProcessor(process_instance) - # processor.resume() - # processor.do_engine_steps(save=True) - # human_task_one = process_instance.active_human_tasks[0] - # spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) - # ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) - # assert process_instance.status == "complete" - # - # def test_properly_resets_process_to_given_task_with_call_activity( - # self, - # app: Flask, - # client: FlaskClient, - # with_db_and_bpmn_file_cleanup: None, - # with_super_admin_user: UserModel, - # ) -> None: - # self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") - # initiator_user = self.find_or_create_user("initiator_user") - # finance_user_three = self.find_or_create_user("testuser3") - # assert initiator_user.principal is not None - # assert finance_user_three.principal is not None - # AuthorizationService.import_permissions_from_yaml_file() - # - # finance_group = GroupModel.query.filter_by(identifier="Finance Team").first() - # assert finance_group is not None - # - # process_model = load_test_spec( - # process_model_id="test_group/manual_task_with_subprocesses", - # process_model_source_directory="manual_task_with_subprocesses", - # ) - # process_instance = self.create_process_instance_from_process_model( - # process_model=process_model, user=initiator_user - # ) - # processor = ProcessInstanceProcessor(process_instance) - # processor.do_engine_steps(save=True) - # # with open("before_reset.json", 'w') as f: f.write(json.dumps(processor.serialize(), indent=2)) - # assert len(process_instance.active_human_tasks) == 1 - # initial_human_task_id = process_instance.active_human_tasks[0].id - # assert len(process_instance.active_human_tasks) == 1 - # assert initial_human_task_id == process_instance.active_human_tasks[0].id - # - # human_task_one = process_instance.active_human_tasks[0] - # spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) - # ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) - # human_task_one = process_instance.active_human_tasks[0] - # spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) - # ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) - # - # # NOTES: - # # somehow we are hosing the task state so that when completing tasks of a subprocess, the task AFTER the subprocess task - # # is not marked READY but instead stays as FUTURE. Running things like: - # # self.last_completed_spiff_task.task_spec._update(self.last_completed_spiff_task) - # # and - # # self.last_completed_spiff_task.task_spec._predict(self.last_completed_spiff_task, mask=TaskState.NOT_FINISHED_MASK) - # # did not help. 
- # - # processor.suspend() - # task_model_to_reset_to = ( - # TaskModel.query.join(TaskDefinitionModel) - # .filter(TaskDefinitionModel.bpmn_identifier == "top_level_subprocess_script") - # .order_by(TaskModel.id.desc()) # type: ignore - # .first() - # ) - # assert task_model_to_reset_to is not None - # ProcessInstanceProcessor.reset_process(process_instance, task_model_to_reset_to.guid, commit=True) - # - # process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() - # processor = ProcessInstanceProcessor(process_instance) - # processor.resume() - # processor.do_engine_steps(save=True) - # - # assert len(process_instance.active_human_tasks) == 1 - # human_task_one = process_instance.active_human_tasks[0] - # spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) - # ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) - # - # assert process_instance.status == "complete" + def test_properly_resets_process_to_given_task( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") + initiator_user = self.find_or_create_user("initiator_user") + finance_user_three = self.find_or_create_user("testuser3") + assert initiator_user.principal is not None + assert finance_user_three.principal is not None + AuthorizationService.import_permissions_from_yaml_file() + + finance_group = GroupModel.query.filter_by(identifier="Finance Team").first() + assert finance_group is not None + + process_model = load_test_spec( + process_model_id="test_group/manual_task", + process_model_source_directory="manual_task", + ) + process_instance = self.create_process_instance_from_process_model( + process_model=process_model, user=initiator_user + ) + processor = ProcessInstanceProcessor(process_instance) + processor.do_engine_steps(save=True) + assert len(process_instance.active_human_tasks) == 1 + initial_human_task_id = process_instance.active_human_tasks[0].id + + # save again to ensure we go attempt to process the human tasks again + processor.save() + + assert len(process_instance.active_human_tasks) == 1 + assert initial_human_task_id == process_instance.active_human_tasks[0].id + + processor = ProcessInstanceProcessor(process_instance) + human_task_one = process_instance.active_human_tasks[0] + spiff_manual_task = processor.__class__.get_task_by_bpmn_identifier( + human_task_one.task_name, processor.bpmn_process_instance + ) + assert spiff_manual_task is not None + + processor.suspend() + ProcessInstanceProcessor.reset_process(process_instance, str(spiff_manual_task.parent.id), commit=True) + + process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() + processor = ProcessInstanceProcessor(process_instance) + processor.resume() + processor.do_engine_steps(save=True) + human_task_one = process_instance.active_human_tasks[0] + spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) + ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) + assert process_instance.status == "complete" + + def test_properly_resets_process_to_given_task_with_call_activity( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + 
self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group") + initiator_user = self.find_or_create_user("initiator_user") + finance_user_three = self.find_or_create_user("testuser3") + assert initiator_user.principal is not None + assert finance_user_three.principal is not None + AuthorizationService.import_permissions_from_yaml_file() + + finance_group = GroupModel.query.filter_by(identifier="Finance Team").first() + assert finance_group is not None + + process_model = load_test_spec( + process_model_id="test_group/manual_task_with_subprocesses", + process_model_source_directory="manual_task_with_subprocesses", + ) + process_instance = self.create_process_instance_from_process_model( + process_model=process_model, user=initiator_user + ) + processor = ProcessInstanceProcessor(process_instance) + processor.do_engine_steps(save=True) + # with open("before_reset.json", 'w') as f: f.write(json.dumps(processor.serialize(), indent=2)) + assert len(process_instance.active_human_tasks) == 1 + initial_human_task_id = process_instance.active_human_tasks[0].id + assert len(process_instance.active_human_tasks) == 1 + assert initial_human_task_id == process_instance.active_human_tasks[0].id + + # import pdb; pdb.set_trace() + human_task_one = process_instance.active_human_tasks[0] + spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) + ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) + # import pdb; pdb.set_trace() + human_task_one = process_instance.active_human_tasks[0] + spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) + ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) + # import pdb; pdb.set_trace() + + # NOTES: + # somehow we are hosing the task state so that when completing tasks of a subprocess, the task AFTER the subprocess task + # is not marked READY but instead stays as FUTURE. Running things like: + # self.last_completed_spiff_task.task_spec._update(self.last_completed_spiff_task) + # and + # self.last_completed_spiff_task.task_spec._predict(self.last_completed_spiff_task, mask=TaskState.NOT_FINISHED_MASK) + # did not help. 
+ + processor.suspend() + task_model_to_reset_to = ( + TaskModel.query.join(TaskDefinitionModel) + .filter(TaskDefinitionModel.bpmn_identifier == "top_level_subprocess_script") + .order_by(TaskModel.id.desc()) # type: ignore + .first() + ) + assert task_model_to_reset_to is not None + import pdb; pdb.set_trace() + ProcessInstanceProcessor.reset_process(process_instance, task_model_to_reset_to.guid, commit=True) + import pdb; pdb.set_trace() + + process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() + processor = ProcessInstanceProcessor(process_instance) + processor.resume() + processor.do_engine_steps(save=True) + import pdb; pdb.set_trace() + + assert len(process_instance.active_human_tasks) == 1 + human_task_one = process_instance.active_human_tasks[0] + spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) + ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) + + assert process_instance.status == "complete" def test_properly_saves_tasks_when_running( self, From 6a3b4e5dfaef1817410256737b8feaf37d9c643b Mon Sep 17 00:00:00 2001 From: jasquat Date: Fri, 31 Mar 2023 15:42:18 -0400 Subject: [PATCH 149/162] cleaned up the reset code w/ burnettk --- .../routes/process_instances_controller.py | 2 +- .../services/process_instance_processor.py | 124 ++++++++---------- .../services/task_service.py | 15 +-- .../unit/test_process_instance_processor.py | 12 +- .../src/routes/ProcessInstanceShow.tsx | 18 +-- 5 files changed, 77 insertions(+), 94 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index 51c304c5..dd0ad195 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -693,7 +693,7 @@ def process_instance_reset( ) -> flask.wrappers.Response: """Reset a process instance to a particular step.""" process_instance = _find_process_instance_by_id_or_raise(process_instance_id) - ProcessInstanceProcessor.reset_process(process_instance, to_task_guid, commit=True) + ProcessInstanceProcessor.reset_process(process_instance, to_task_guid) return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index c0412512..93f3a811 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -1,8 +1,6 @@ """Process_instance_processor.""" import _strptime # type: ignore import copy -from sqlalchemy import or_ -from sqlalchemy import and_ import decimal import json import logging @@ -53,6 +51,8 @@ from SpiffWorkflow.spiff.serializer.config import SPIFF_SPEC_CONFIG # type: ign from SpiffWorkflow.task import Task as SpiffTask # type: ignore from SpiffWorkflow.task import TaskState from SpiffWorkflow.util.deep_merge import DeepMerge # type: ignore +from sqlalchemy import and_ +from sqlalchemy import or_ from spiffworkflow_backend.exceptions.api_error import ApiError from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel @@ -1265,9 +1265,7 @@ class 
ProcessInstanceProcessor: # they never get picked up by spiff and processed. The process instance just stops after the to_task_guid # and marks itself complete without processing any of the children. @classmethod - def reset_process( - cls, process_instance: ProcessInstanceModel, to_task_guid: str, commit: Optional[bool] = False - ) -> None: + def reset_process(cls, process_instance: ProcessInstanceModel, to_task_guid: str) -> None: """Reset a process to an earlier state.""" # raise Exception("This feature to reset a process instance to a given task is currently unavaiable") cls.add_event_to_process_instance( @@ -1280,11 +1278,13 @@ class ProcessInstanceProcessor: f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'" ) + # NOTE: run ALL queries before making changes to ensure we get everything before anything changes parent_bpmn_processes, task_models_of_parent_bpmn_processes = TaskService.task_models_of_parent_bpmn_processes( to_task_model ) task_models_of_parent_bpmn_processes_guids = [p.guid for p in task_models_of_parent_bpmn_processes if p.guid] parent_bpmn_processes_ids = [p.id for p in parent_bpmn_processes] + tasks_to_update_query = db.session.query(TaskModel).filter( and_( or_( @@ -1297,72 +1297,67 @@ class ProcessInstanceProcessor: ) tasks_to_update = tasks_to_update_query.all() tasks_to_update_guids = [t.guid for t in tasks_to_update] - bpmn_processes_to_update_query = db.session.query(BpmnProcessModel).filter( + + tasks_to_delete_query = db.session.query(TaskModel).filter( and_( - BpmnProcessModel.guid.in_(tasks_to_update_guids), # type: ignore - BpmnProcessModel.id.not_in(parent_bpmn_processes_ids), # type: ignore + or_( + TaskModel.end_in_seconds > to_task_model.end_in_seconds, + TaskModel.end_in_seconds.is_not(None), # type: ignore + ), + TaskModel.process_instance_id == process_instance.id, + TaskModel.guid.not_in(task_models_of_parent_bpmn_processes_guids), # type: ignore + TaskModel.bpmn_process_id.not_in(parent_bpmn_processes_ids), # type: ignore ) ) - bpmn_processes_to_update = bpmn_processes_to_update_query.all() + tasks_to_delete = tasks_to_delete_query.all() + tasks_to_delete_guids = [t.guid for t in tasks_to_delete] + tasks_to_delete_ids = [t.id for t in tasks_to_delete] - # run all queries before making changes to task_model - if commit: - tasks_to_delete_query = db.session.query(TaskModel).filter( + # delete bpmn processes that are also tasks that we either deleted or will update. + # this is to force spiff to recreate those bpmn processes with the correct associated task guids. + bpmn_processes_to_delete_query = db.session.query(BpmnProcessModel).filter( + or_( + BpmnProcessModel.guid.in_(tasks_to_delete_guids), # type: ignore and_( - or_( - TaskModel.end_in_seconds > to_task_model.end_in_seconds, - TaskModel.end_in_seconds.is_not(None), # type: ignore - ), - TaskModel.process_instance_id == process_instance.id, - TaskModel.guid.not_in(task_models_of_parent_bpmn_processes_guids), # type: ignore - TaskModel.bpmn_process_id.not_in(parent_bpmn_processes_ids), # type: ignore - ) + BpmnProcessModel.guid.in_(tasks_to_update_guids), # type: ignore + BpmnProcessModel.id.not_in(parent_bpmn_processes_ids), # type: ignore + ), ) + ) + bpmn_processes_to_delete = bpmn_processes_to_delete_query.order_by( + BpmnProcessModel.id.desc() # type: ignore + ).all() - tasks_to_delete = tasks_to_delete_query.all() + # delete any human task that was for a task that we deleted since they will get recreated later. 
+ human_tasks_to_delete = HumanTaskModel.query.filter( + HumanTaskModel.task_model_id.in_(tasks_to_delete_ids) # type: ignore + ).all() - # delete any later tasks from to_task_model and delete bpmn processes that may be - # link directly to one of those tasks. - tasks_to_delete_guids = [t.guid for t in tasks_to_delete] - tasks_to_delete_ids = [t.id for t in tasks_to_delete] - bpmn_processes_to_delete = BpmnProcessModel.query.filter( - BpmnProcessModel.guid.in_(tasks_to_delete_guids) # type: ignore - ).order_by(BpmnProcessModel.id.desc()).all() - human_tasks_to_delete = HumanTaskModel.query.filter( - HumanTaskModel.task_model_id.in_(tasks_to_delete_ids) # type: ignore - ).all() + # ensure the correct order for foreign keys + for human_task_to_delete in human_tasks_to_delete: + db.session.delete(human_task_to_delete) + db.session.commit() + for task_to_delete in tasks_to_delete: + db.session.delete(task_to_delete) + db.session.commit() + for bpmn_process_to_delete in bpmn_processes_to_delete: + db.session.delete(bpmn_process_to_delete) + db.session.commit() + related_human_task = HumanTaskModel.query.filter_by(task_model_id=to_task_model.id).first() + if related_human_task is not None: + db.session.delete(related_human_task) - # import pdb; pdb.set_trace() - # ensure the correct order for foreign keys - for human_task_to_delete in human_tasks_to_delete: - db.session.delete(human_task_to_delete) - db.session.commit() - for task_to_delete in tasks_to_delete: - db.session.delete(task_to_delete) - db.session.commit() - for bpmn_process_to_delete in bpmn_processes_to_delete: - db.session.delete(bpmn_process_to_delete) - db.session.commit() - - related_human_task = HumanTaskModel.query.filter_by(task_model_id=to_task_model.id).first() - if related_human_task is not None: - db.session.delete(related_human_task) - - tasks_to_update_ids = [t.id for t in tasks_to_update] - human_tasks_to_delete = HumanTaskModel.query.filter( - HumanTaskModel.task_model_id.in_(tasks_to_update_ids) # type: ignore - ).all() - for human_task_to_delete in human_tasks_to_delete: - db.session.delete(human_task_to_delete) - db.session.commit() + tasks_to_update_ids = [t.id for t in tasks_to_update] + human_tasks_to_delete = HumanTaskModel.query.filter( + HumanTaskModel.task_model_id.in_(tasks_to_update_ids) # type: ignore + ).all() + for human_task_to_delete in human_tasks_to_delete: + db.session.delete(human_task_to_delete) + db.session.commit() for task_to_update in tasks_to_update: - print(f"task_to_update.state: {task_to_update.state}") - TaskService.reset_task_model(task_to_update, state="FUTURE", commit=commit) - - for bpmn_process_to_update in bpmn_processes_to_update: - db.session.delete(bpmn_process_to_update) + TaskService.reset_task_model(task_to_update, state="FUTURE") parent_task_model = TaskModel.query.filter_by(guid=to_task_model.properties_json["parent"]).first() if parent_task_model is None: @@ -1375,10 +1370,9 @@ class ProcessInstanceProcessor: state="READY", json_data_hash=parent_task_model.json_data_hash, python_env_data_hash=parent_task_model.python_env_data_hash, - commit=commit, ) for task_model in task_models_of_parent_bpmn_processes: - TaskService.reset_task_model(task_model, state="WAITING", commit=commit) + TaskService.reset_task_model(task_model, state="WAITING") bpmn_process = to_task_model.bpmn_process properties_json = copy.copy(bpmn_process.properties_json) @@ -1387,11 +1381,9 @@ class ProcessInstanceProcessor: db.session.add(bpmn_process) db.session.commit() - import pdb; pdb.set_trace() - if 
commit: - processor = ProcessInstanceProcessor(process_instance) - processor.save() - processor.suspend() + processor = ProcessInstanceProcessor(process_instance) + processor.save() + processor.suspend() @staticmethod def get_parser() -> MyCustomParser: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index b6ca0c73..2b480701 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -504,7 +504,6 @@ class TaskService: cls, task_model: TaskModel, state: str, - commit: Optional[bool] = True, json_data_hash: Optional[str] = None, python_env_data_hash: Optional[str] = None, ) -> None: @@ -522,18 +521,16 @@ class TaskService: task_model.start_in_seconds = None task_model.end_in_seconds = None - if commit: - db.session.add(task_model) - db.session.commit() + db.session.add(task_model) + db.session.commit() new_properties_json["state"] = getattr(TaskState, state) task_model.properties_json = new_properties_json - if commit: - # if we commit the properties json at the same time as the other items - # the json gets reset for some reason. - db.session.add(task_model) - db.session.commit() + # if we commit the properties json at the same time as the other items + # the json gets reset for some reason. + db.session.add(task_model) + db.session.commit() @classmethod def _create_task( diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index 6580780b..15ec170b 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -1,6 +1,5 @@ """Test_process_instance_processor.""" from uuid import UUID -from spiffworkflow_backend.models.task_definition import TaskDefinitionModel import pytest from flask import g @@ -17,6 +16,7 @@ from spiffworkflow_backend.models.group import GroupModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus from spiffworkflow_backend.models.task import TaskModel # noqa: F401 +from spiffworkflow_backend.models.task_definition import TaskDefinitionModel from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.services.authorization_service import AuthorizationService from spiffworkflow_backend.services.authorization_service import ( @@ -298,7 +298,7 @@ class TestProcessInstanceProcessor(BaseTest): assert spiff_manual_task is not None processor.suspend() - ProcessInstanceProcessor.reset_process(process_instance, str(spiff_manual_task.parent.id), commit=True) + ProcessInstanceProcessor.reset_process(process_instance, str(spiff_manual_task.parent.id)) process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() processor = ProcessInstanceProcessor(process_instance) @@ -341,15 +341,12 @@ class TestProcessInstanceProcessor(BaseTest): assert len(process_instance.active_human_tasks) == 1 assert initial_human_task_id == process_instance.active_human_tasks[0].id - # import pdb; pdb.set_trace() human_task_one = process_instance.active_human_tasks[0] spiff_manual_task = 
processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) - # import pdb; pdb.set_trace() human_task_one = process_instance.active_human_tasks[0] spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) - # import pdb; pdb.set_trace() # NOTES: # somehow we are hosing the task state so that when completing tasks of a subprocess, the task AFTER the subprocess task @@ -367,15 +364,12 @@ class TestProcessInstanceProcessor(BaseTest): .first() ) assert task_model_to_reset_to is not None - import pdb; pdb.set_trace() - ProcessInstanceProcessor.reset_process(process_instance, task_model_to_reset_to.guid, commit=True) - import pdb; pdb.set_trace() + ProcessInstanceProcessor.reset_process(process_instance, task_model_to_reset_to.guid) process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() processor = ProcessInstanceProcessor(process_instance) processor.resume() processor.do_engine_steps(save=True) - import pdb; pdb.set_trace() assert len(process_instance.active_human_tasks) == 1 human_task_one = process_instance.active_human_tasks[0] diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx index aab94c11..ed773ef0 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx @@ -674,16 +674,16 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { ); }; - const canResetProcess = (_task: Task) => { + const canResetProcess = (task: Task) => { // disabling this feature for now - return false; - // return ( - // ability.can('POST', targetUris.processInstanceResetPath) && - // processInstance && - // processInstance.status === 'suspended' && - // task.state === 'READY' && - // !showingActiveTask() - // ); + // return false; + return ( + ability.can('POST', targetUris.processInstanceResetPath) && + processInstance && + processInstance.status === 'suspended' && + task.state === 'READY' && + !showingActiveTask() + ); }; const getEvents = (task: Task) => { From 2cc4ea104b75794eab9db7197acaa5e08109643c Mon Sep 17 00:00:00 2001 From: jasquat Date: Mon, 3 Apr 2023 09:46:00 -0400 Subject: [PATCH 150/162] updated to use spiff main --- spiffworkflow-backend/poetry.lock | 8 ++++---- spiffworkflow-backend/pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/spiffworkflow-backend/poetry.lock b/spiffworkflow-backend/poetry.lock index 3e0f4694..0d2b81e1 100644 --- a/spiffworkflow-backend/poetry.lock +++ b/spiffworkflow-backend/poetry.lock @@ -1875,7 +1875,7 @@ test = ["pytest"] [[package]] name = "SpiffWorkflow" version = "1.2.1" -description = "A workflow framework and BPMN/DMN Processor" +description = "" category = "main" optional = false python-versions = "*" @@ -1889,8 +1889,8 @@ lxml = "*" [package.source] type = "git" url = "https://github.com/sartography/SpiffWorkflow" -reference = "bugfix/execute-event-gateways-on-ready" -resolved_reference = "a1795209b415037630a44522fc7cc9d6e70e50d6" +reference = "main" +resolved_reference = "e1add839ddf2512f27cd0afe681ff3e0460d6f7a" [[package]] name = "sqlalchemy" @@ -2273,7 +2273,7 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more [metadata] 
lock-version = "1.1" python-versions = ">=3.9,<3.12" -content-hash = "2dc5b510dcd40c461934921401b09ce6cf9f49ddb440192e819556fbbc6cdbfc" +content-hash = "9fea44386fbab29102a051a254058909568c4ee3dbd6a402fb91aacbcf1f7fd2" [metadata.files] alabaster = [ diff --git a/spiffworkflow-backend/pyproject.toml b/spiffworkflow-backend/pyproject.toml index 6bdb5b0f..df2495e0 100644 --- a/spiffworkflow-backend/pyproject.toml +++ b/spiffworkflow-backend/pyproject.toml @@ -27,7 +27,7 @@ flask-marshmallow = "*" flask-migrate = "*" flask-restful = "*" werkzeug = "*" -SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "bugfix/execute-event-gateways-on-ready"} +SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"} # SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "6cad2981712bb61eca23af1adfafce02d3277cb9"} # SpiffWorkflow = {develop = true, path = "../SpiffWorkflow" } sentry-sdk = "^1.10" From 10fec9e8620dc6a603989adae3011526c3690200 Mon Sep 17 00:00:00 2001 From: jasquat Date: Mon, 3 Apr 2023 11:40:26 -0400 Subject: [PATCH 151/162] some cleanup and added back in option to get most recent tasks only --- .../routes/process_instances_controller.py | 7 +++ .../services/process_instance_processor.py | 7 ++- .../services/task_service.py | 10 +--- .../unit/test_process_instance_processor.py | 53 +++++++++---------- .../src/routes/ProcessInstanceShow.tsx | 5 +- 5 files changed, 37 insertions(+), 45 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index dd0ad195..a7dda210 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -669,6 +669,13 @@ def process_instance_task_list( task_model_query = task_model_query.filter(bpmn_process_alias.id.in_(bpmn_process_ids)) task_models = task_model_query.all() + task_model_list = {} + if most_recent_tasks_only: + for task_model in task_models: + if task_model.bpmn_identifier not in task_model_list: + task_model_list[task_model.bpmn_identifier] = task_model + + task_models = list(task_model_list.values()) if to_task_model is not None: task_models_dict = json.loads(current_app.json.dumps(task_models)) for task_model in task_models_dict: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 93f3a811..864ab2d1 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -1336,13 +1336,10 @@ class ProcessInstanceProcessor: # ensure the correct order for foreign keys for human_task_to_delete in human_tasks_to_delete: db.session.delete(human_task_to_delete) - db.session.commit() for task_to_delete in tasks_to_delete: db.session.delete(task_to_delete) - db.session.commit() for bpmn_process_to_delete in bpmn_processes_to_delete: db.session.delete(bpmn_process_to_delete) - db.session.commit() related_human_task = HumanTaskModel.query.filter_by(task_model_id=to_task_model.id).first() if related_human_task is not None: @@ -1354,10 +1351,10 @@ class ProcessInstanceProcessor: ).all() for human_task_to_delete in human_tasks_to_delete: 
db.session.delete(human_task_to_delete) - db.session.commit() for task_to_update in tasks_to_update: TaskService.reset_task_model(task_to_update, state="FUTURE") + db.session.bulk_save_objects(tasks_to_update) parent_task_model = TaskModel.query.filter_by(guid=to_task_model.properties_json["parent"]).first() if parent_task_model is None: @@ -1371,8 +1368,10 @@ class ProcessInstanceProcessor: json_data_hash=parent_task_model.json_data_hash, python_env_data_hash=parent_task_model.python_env_data_hash, ) + db.session.add(to_task_model) for task_model in task_models_of_parent_bpmn_processes: TaskService.reset_task_model(task_model, state="WAITING") + db.session.bulk_save_objects(task_models_of_parent_bpmn_processes) bpmn_process = to_task_model.bpmn_process properties_json = copy.copy(bpmn_process.properties_json) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index 2b480701..8223f5be 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -516,22 +516,14 @@ class TaskService: else: task_model.python_env_data_hash = python_env_data_hash - new_properties_json = copy.copy(task_model.properties_json) task_model.state = state task_model.start_in_seconds = None task_model.end_in_seconds = None - db.session.add(task_model) - db.session.commit() - + new_properties_json = copy.copy(task_model.properties_json) new_properties_json["state"] = getattr(TaskState, state) task_model.properties_json = new_properties_json - # if we commit the properties json at the same time as the other items - # the json gets reset for some reason. - db.session.add(task_model) - db.session.commit() - @classmethod def _create_task( cls, diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index 15ec170b..ac5d3998 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -1,5 +1,6 @@ """Test_process_instance_processor.""" from uuid import UUID +from spiffworkflow_backend.models.db import db import pytest from flask import g @@ -335,7 +336,6 @@ class TestProcessInstanceProcessor(BaseTest): ) processor = ProcessInstanceProcessor(process_instance) processor.do_engine_steps(save=True) - # with open("before_reset.json", 'w') as f: f.write(json.dumps(processor.serialize(), indent=2)) assert len(process_instance.active_human_tasks) == 1 initial_human_task_id = process_instance.active_human_tasks[0].id assert len(process_instance.active_human_tasks) == 1 @@ -348,14 +348,6 @@ class TestProcessInstanceProcessor(BaseTest): spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id)) ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) - # NOTES: - # somehow we are hosing the task state so that when completing tasks of a subprocess, the task AFTER the subprocess task - # is not marked READY but instead stays as FUTURE. 
Running things like: - # self.last_completed_spiff_task.task_spec._update(self.last_completed_spiff_task) - # and - # self.last_completed_spiff_task.task_spec._predict(self.last_completed_spiff_task, mask=TaskState.NOT_FINISHED_MASK) - # did not help. - processor.suspend() task_model_to_reset_to = ( TaskModel.query.join(TaskDefinitionModel) @@ -366,8 +358,15 @@ class TestProcessInstanceProcessor(BaseTest): assert task_model_to_reset_to is not None ProcessInstanceProcessor.reset_process(process_instance, task_model_to_reset_to.guid) + # make sure sqlalchemy session matches current db state + db.session.expire_all() process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() processor = ProcessInstanceProcessor(process_instance) + + # make sure we reset to the task we expected + ready_or_waiting_tasks = processor.get_all_ready_or_waiting_tasks() + top_level_subprocess_script_spiff_task = next(task for task in ready_or_waiting_tasks if task.task_spec.name == "top_level_subprocess_script") + assert top_level_subprocess_script_spiff_task is not None processor.resume() processor.do_engine_steps(save=True) @@ -511,18 +510,17 @@ class TestProcessInstanceProcessor(BaseTest): f" {expected_task_data_key}." ) - # TODO: add back in when removing MAYBE and LIKELY tasks - # count_failure_message = ( - # f"{base_failure_message} There are more than 2 entries of this task in the db." - # " There should only ever be max 2." - # ) - # task_models_with_bpmn_identifier_count = ( - # TaskModel.query.join(TaskDefinitionModel) - # .filter(TaskModel.process_instance_id == process_instance_relookup.id) - # .filter(TaskDefinitionModel.bpmn_identifier == spiff_task.task_spec.name) - # .count() - # ) - # assert task_models_with_bpmn_identifier_count < 3, count_failure_message + count_failure_message = ( + f"{base_failure_message} There are more than 2 entries of this task in the db." + " There should only ever be max 2." 
+ ) + task_models_with_bpmn_identifier_count = ( + TaskModel.query.join(TaskDefinitionModel) + .filter(TaskModel.process_instance_id == process_instance_relookup.id) + .filter(TaskDefinitionModel.bpmn_identifier == spiff_task.task_spec.name) + .count() + ) + assert task_models_with_bpmn_identifier_count < 3, count_failure_message task_model = TaskModel.query.filter_by(guid=str(spiff_task.id)).first() assert task_model.start_in_seconds is not None @@ -583,13 +581,12 @@ class TestProcessInstanceProcessor(BaseTest): ) assert task_bpmn_identifier in spiff_tasks_checked, message - # TODO: add back in when removing MAYBE and LIKELY tasks - # task_models_that_are_predicted_count = ( - # TaskModel.query.filter(TaskModel.process_instance_id == process_instance_relookup.id) - # .filter(TaskModel.state.in_(["LIKELY", "MAYBE"])) # type: ignore - # .count() - # ) - # assert task_models_that_are_predicted_count == 0 + task_models_that_are_predicted_count = ( + TaskModel.query.filter(TaskModel.process_instance_id == process_instance_relookup.id) + .filter(TaskModel.state.in_(["LIKELY", "MAYBE"])) # type: ignore + .count() + ) + assert task_models_that_are_predicted_count == 0 assert processor.get_data() == data_set_7 diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx index ed773ef0..7ae4d4ce 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx @@ -236,8 +236,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { tasks.forEach(function getUserTasksElement(task: Task) { if (task.state === 'COMPLETED') { taskIds.completed.push(task); - } - if (task.state === 'READY' || task.state === 'WAITING') { + } else if (task.state === 'READY' || task.state === 'WAITING') { taskIds.readyOrWaiting.push(task); } return null; @@ -675,8 +674,6 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { }; const canResetProcess = (task: Task) => { - // disabling this feature for now - // return false; return ( ability.can('POST', targetUris.processInstanceResetPath) && processInstance && From 9655aa8a94cd719a8b422d841f93122572523d1d Mon Sep 17 00:00:00 2001 From: jasquat Date: Mon, 3 Apr 2023 11:50:28 -0400 Subject: [PATCH 152/162] pyl and fixed tests --- .../routes/process_instances_controller.py | 2 +- .../unit/test_process_instance_processor.py | 6 ++++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index a7dda210..7bbec1ff 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -674,8 +674,8 @@ def process_instance_task_list( for task_model in task_models: if task_model.bpmn_identifier not in task_model_list: task_model_list[task_model.bpmn_identifier] = task_model + task_models = list(task_model_list.values()) - task_models = list(task_model_list.values()) if to_task_model is not None: task_models_dict = json.loads(current_app.json.dumps(task_models)) for task_model in task_models_dict: diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index ac5d3998..70978a97 
100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -1,6 +1,5 @@ """Test_process_instance_processor.""" from uuid import UUID -from spiffworkflow_backend.models.db import db import pytest from flask import g @@ -13,6 +12,7 @@ from tests.spiffworkflow_backend.helpers.test_data import load_test_spec from spiffworkflow_backend.exceptions.api_error import ApiError from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel +from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.group import GroupModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus @@ -365,7 +365,9 @@ class TestProcessInstanceProcessor(BaseTest): # make sure we reset to the task we expected ready_or_waiting_tasks = processor.get_all_ready_or_waiting_tasks() - top_level_subprocess_script_spiff_task = next(task for task in ready_or_waiting_tasks if task.task_spec.name == "top_level_subprocess_script") + top_level_subprocess_script_spiff_task = next( + task for task in ready_or_waiting_tasks if task.task_spec.name == "top_level_subprocess_script" + ) assert top_level_subprocess_script_spiff_task is not None processor.resume() processor.do_engine_steps(save=True) From 80aa698cab825ac1ec0ff61434f6a107d76bf71f Mon Sep 17 00:00:00 2001 From: burnettk Date: Mon, 3 Apr 2023 12:22:06 -0400 Subject: [PATCH 153/162] add users for james --- .../realm_exports/spiffworkflow-realm.json | 196 +++++++++++++++--- .../keycloak/test_user_lists/status | 7 + 2 files changed, 174 insertions(+), 29 deletions(-) diff --git a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json index c6ca21ed..9bacd506 100644 --- a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json +++ b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json @@ -899,6 +899,29 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] + }, { + "id" : "8c6cf190-66e3-4c8d-aa06-1b9972ecd982", + "createdTimestamp" : 1680538438437, + "username" : "core6.contributor", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "core6.contributor@status.im", + "attributes" : { + "spiffworkflow-employeeid" : [ "199" ] + }, + "credentials" : [ { + "id" : "1dadc9a8-6f7d-4795-bcc7-2b9d8aacb54a", + "type" : "password", + "createdDate" : 1680538438553, + "secretData" : "{\"value\":\"YbDgbKbiIjHB76RAJN7Q1AWYkdNvDMHUC1P3RJ6AV8ASEUr6fJ8U11WroIMmkiWs1TlewJi0mF4rWBsVkLzjlg==\",\"salt\":\"BbrA/rjtvxwrZAsS3BYARA==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] }, { "id" : "7b9767ac-24dc-43b0-838f-29e16b4fd14e", "createdTimestamp" : 1675718483773, @@ -1500,8 +1523,8 @@ "notBefore" : 0, "groups" : [ ] }, { - "id" : "9a4d176c-e61e-4392-8c50-a04988606aa6", - "createdTimestamp" : 1678461818383, + "id" : "ec8a613d-de94-4696-910d-635ab0d90fc1", + "createdTimestamp" : 1680538439046, "username" : "infra6.sme", "enabled" : true, "totp" : false, @@ -1511,10 +1534,10 @@ "spiffworkflow-employeeid" : [ "212" ] }, 
"credentials" : [ { - "id" : "c381e58c-3e06-4e10-bd23-46f258c1c91f", + "id" : "59e02828-28cb-4555-9497-0b9f674ecd43", "type" : "password", - "createdDate" : 1678461818420, - "secretData" : "{\"value\":\"m17+awcU3Ezhfi/gBK0xyxvnGKHads95lhn7uxvEXaPCJF0ioN8C27tH1RwU1w9ptdWjWKWAM9dcimIegy7M7g==\",\"salt\":\"0kCljoos7qzCnVdv+3IMjQ==\",\"additionalParameters\":{}}", + "createdDate" : 1680538439110, + "secretData" : "{\"value\":\"DFa3Yz3ZRdFGmAFqiq6Sg+s673FFnjVGOzS/e4SnDAdv1JzavYka2QngSHDvZfi5bO7ecDE0+idwJP/vtcMjyQ==\",\"salt\":\"iSHEw6brz62W6RqGULCyug==\",\"additionalParameters\":{}}", "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" } ], "disableableCredentialTypes" : [ ], @@ -1892,6 +1915,29 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] + }, { + "id" : "a8f54828-b188-41e6-80a6-920cab95f7db", + "createdTimestamp" : 1680538439162, + "username" : "legal6.sme", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "legal6.sme@status.im", + "attributes" : { + "spiffworkflow-employeeid" : [ "213" ] + }, + "credentials" : [ { + "id" : "8e70e379-7974-40b6-ba31-08a1632a1a08", + "type" : "password", + "createdDate" : 1680538439219, + "secretData" : "{\"value\":\"Mwqt3FKuQ1q+OUpb8dIOOGwTKNmVuOCBnnJhSzFHUSa/9nrfWuL2GXCspHwPnMP4fF1eEXAg5B8SBC8cL/paEQ==\",\"salt\":\"o5Sj16r/DznxOzGJi6xJJg==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] }, { "id" : "8a03f00f-310d-4bae-b918-f6f128f98095", "createdTimestamp" : 1677187934419, @@ -2168,6 +2214,29 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] + }, { + "id" : "b5bd1dc1-308d-4912-b3e4-92bf5fc45ed5", + "createdTimestamp" : 1680538439258, + "username" : "peopleops.partner6.sme", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "peopleops.partner6.sme@status.im", + "attributes" : { + "spiffworkflow-employeeid" : [ "216" ] + }, + "credentials" : [ { + "id" : "c719418c-b203-4056-9e19-43c5e87d1d43", + "type" : "password", + "createdDate" : 1680538439300, + "secretData" : "{\"value\":\"pzmtPn2OllnAYKIIS2M38n0UFrtbkX5zN44DpI/PrzmnxRgT2TvlJmjCtxp5HRUi3lngT6Jdr3IvqpO5o93Y5g==\",\"salt\":\"1WKPI8ktFMZoLCAv2ir5+A==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] }, { "id" : "b57086d7-f301-4e11-ab02-60b02c79163a", "createdTimestamp" : 1680210955550, @@ -2467,6 +2536,29 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] + }, { + "id" : "62862d90-e996-48ac-a8ee-5af43356dca4", + "createdTimestamp" : 1680538439355, + "username" : "ppg.ba6.sme", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "ppg.ba6.sme@status.im", + "attributes" : { + "spiffworkflow-employeeid" : [ "236" ] + }, + "credentials" : [ { + "id" : "b242e740-4d6f-412a-9719-84da41c8d1ed", + "type" : "password", + "createdDate" : 1680538439405, + "secretData" : 
"{\"value\":\"oveDoHPfm0m+SkrY3rLyFfIOK1tH+Fc8y5KC+CGMccNIPqLN5p7ytXcMjjcIhRdxAW9CzCGFUKhVnGAXa/PGIQ==\",\"salt\":\"kQZeYzICjjs6DO2hEgEbDw==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] }, { "id" : "f56fe387-d153-42c2-880a-6726bd624bae", "createdTimestamp" : 1676302144802, @@ -2674,6 +2766,29 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] + }, { + "id" : "3ac1954a-713a-47c7-bd41-d618063a1053", + "createdTimestamp" : 1680538438655, + "username" : "security6.sme", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "security6.sme@status.im", + "attributes" : { + "spiffworkflow-employeeid" : [ "211" ] + }, + "credentials" : [ { + "id" : "e3ceb7b3-617d-4e52-980c-e5edd9ba48fb", + "type" : "password", + "createdDate" : 1680538438713, + "secretData" : "{\"value\":\"iD1TfnQecNf0giE/5Ji0JQL/z91X4QmeqtiJKp/Dsfc55vPVh7llJlVygL7x2Ctcl4/+X10XgtSUkdAvdi3Tvw==\",\"salt\":\"6c0hHyISU/BOwh8vntCIfg==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] }, { "id" : "b768e3ef-f905-4493-976c-bc3408c04bec", "createdTimestamp" : 1675447832524, @@ -2800,6 +2915,29 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] + }, { + "id" : "654d55c5-2380-456f-a99b-936aa8cce4ee", + "createdTimestamp" : 1680538439445, + "username" : "web.project-lead", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "web.project-lead@status.im", + "attributes" : { + "spiffworkflow-employeeid" : [ "235" ] + }, + "credentials" : [ { + "id" : "c28af9d4-37bb-445a-a8cc-12a87bd8dd2c", + "type" : "password", + "createdDate" : 1680538439501, + "secretData" : "{\"value\":\"1ug7sJNXy9qUby6hABKyLJ8R0xa1pVldXFltuO6Xtqe7qIt9+eUbhN2o9dZ8vk5/aPIFaaIcQPOFZdaKOE/XWw==\",\"salt\":\"F3utYf4viApmPmC6FSZ0vA==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] } ], "scopeMappings" : [ { "clientScope" : "offline_access", @@ -4032,7 +4170,7 @@ "subType" : "authenticated", "subComponents" : { }, "config" : { - "allowed-protocol-mapper-types" : [ "oidc-address-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-property-mapper", "oidc-full-name-mapper", "saml-role-list-mapper", "saml-user-attribute-mapper", "saml-user-property-mapper", "oidc-usermodel-attribute-mapper" ] + "allowed-protocol-mapper-types" : [ "oidc-sha256-pairwise-sub-mapper", "oidc-address-mapper", "oidc-full-name-mapper", "saml-user-attribute-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper", "saml-user-property-mapper", "oidc-usermodel-attribute-mapper" ] } }, { "id" : "d68e938d-dde6-47d9-bdc8-8e8523eb08cd", @@ -4050,7 +4188,7 @@ "subType" : "anonymous", "subComponents" : { }, "config" : { - "allowed-protocol-mapper-types" : [ "oidc-usermodel-property-mapper", "oidc-full-name-mapper", 
"saml-user-property-mapper", "saml-role-list-mapper", "saml-user-attribute-mapper", "oidc-usermodel-attribute-mapper", "oidc-address-mapper", "oidc-sha256-pairwise-sub-mapper" ] + "allowed-protocol-mapper-types" : [ "oidc-full-name-mapper", "saml-user-property-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper", "oidc-address-mapper", "saml-user-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-attribute-mapper" ] } }, { "id" : "3854361d-3fe5-47fb-9417-a99592e3dc5c", @@ -4140,7 +4278,7 @@ "internationalizationEnabled" : false, "supportedLocales" : [ ], "authenticationFlows" : [ { - "id" : "52a2585e-d5f1-418b-aaf8-0cb6b8151ac1", + "id" : "62d7bb2a-5919-48b2-a9f9-511ecf5474c7", "alias" : "Account verification options", "description" : "Method with which to verity the existing account", "providerId" : "basic-flow", @@ -4162,7 +4300,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "0f86958e-5010-413c-aabc-bc77e0973d29", + "id" : "7675760b-666a-4b8c-a9b8-da1e01c207fe", "alias" : "Authentication Options", "description" : "Authentication options.", "providerId" : "basic-flow", @@ -4191,7 +4329,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "f17c6e82-f120-4581-83bf-1b9252f26314", + "id" : "34e18ea8-f515-46dc-9dbf-5b79f8154564", "alias" : "Browser - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -4213,7 +4351,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "b6cef746-aa54-4474-b3fb-cddad60cedee", + "id" : "933e581c-56d8-4614-b2a3-d2db10397ea0", "alias" : "Direct Grant - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -4235,7 +4373,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "18aab899-fd48-4c40-b862-21ca89783c8c", + "id" : "0986dc8c-4bcf-477f-8ba2-3cac02ea656f", "alias" : "First broker login - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -4257,7 +4395,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "edf8650a-1745-4c0f-9bd9-7ee84d7bb85a", + "id" : "534381e4-b0b9-43b2-9ac5-9f1e006b5920", "alias" : "Handle Existing Account", "description" : "Handle what to do if there is existing account with same email/username like authenticated identity provider", "providerId" : "basic-flow", @@ -4279,7 +4417,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "7df66a51-caa6-4b3a-8dd9-7d9e1dbe97a9", + "id" : "922e84ab-85db-494a-8a8c-84d3b0c675f4", "alias" : "Reset - Conditional OTP", "description" : "Flow to determine if the OTP should be reset or not. 
Set to REQUIRED to force.", "providerId" : "basic-flow", @@ -4301,7 +4439,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "13a8650b-00d1-49d4-ba19-945908dbaf40", + "id" : "24b1b409-b6fc-44dc-9a97-93b2f4a78c89", "alias" : "User creation or linking", "description" : "Flow for the existing/non-existing user alternatives", "providerId" : "basic-flow", @@ -4324,7 +4462,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "8e8ca30f-39b7-460a-9986-72483ed987b0", + "id" : "c015a916-a45b-4797-a466-2399164da6fe", "alias" : "Verify Existing Account by Re-authentication", "description" : "Reauthentication of existing account", "providerId" : "basic-flow", @@ -4346,7 +4484,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "91635e4a-d004-4529-b03f-5af754af2547", + "id" : "fc7aec31-855b-4993-b770-57660ff0524f", "alias" : "browser", "description" : "browser based authentication", "providerId" : "basic-flow", @@ -4382,7 +4520,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "4c6e7a05-f2d7-4a95-b0ea-be4979449827", + "id" : "9769d765-42c8-4391-a7ec-aa24f0e84040", "alias" : "clients", "description" : "Base authentication for clients", "providerId" : "client-flow", @@ -4418,7 +4556,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "75d2ec8a-49bb-4790-a2da-af4dd150b9bb", + "id" : "49a937cc-9d51-43d0-a379-67aaae38c51a", "alias" : "direct grant", "description" : "OpenID Connect Resource Owner Grant", "providerId" : "basic-flow", @@ -4447,7 +4585,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "a27a47d4-97bd-473f-9837-5d211c2a1f5d", + "id" : "1a766b69-7ead-442a-84a4-083cd84949cd", "alias" : "docker auth", "description" : "Used by Docker clients to authenticate against the IDP", "providerId" : "basic-flow", @@ -4462,7 +4600,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "d1c3cf02-f04b-48fc-a4b7-ffa0e5fd5b7f", + "id" : "e4ac0543-cfb6-4232-947d-52b8615e0629", "alias" : "first broker login", "description" : "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account", "providerId" : "basic-flow", @@ -4485,7 +4623,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "69abc281-22aa-4cd9-8a17-27c41840146e", + "id" : "86247ee8-b507-406b-9d32-3c68c80084a5", "alias" : "forms", "description" : "Username, password, otp and other auth forms.", "providerId" : "basic-flow", @@ -4507,7 +4645,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "f47e8bfd-4f6c-44ee-b95e-2d5ae5b8fca3", + "id" : "70ef5a26-e3bb-4ba7-a05a-d205b0a3836c", "alias" : "http challenge", "description" : "An authentication flow based on challenge-response HTTP Authentication Schemes", "providerId" : "basic-flow", @@ -4529,7 +4667,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "e7f58101-c7a7-43b2-847f-ad03930d6a64", + "id" : "89abf09a-bfb4-4dea-b164-ca7c563b4009", "alias" : "registration", "description" : "registration flow", "providerId" : "basic-flow", @@ -4545,7 +4683,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "b9a709c3-1f1b-4c69-baca-4287e792f0e4", + "id" : "52d31bf0-dcb6-4b01-a252-b2ba705df036", "alias" : "registration form", "description" : "registration form", "providerId" : "form-flow", @@ -4581,7 +4719,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "5824833e-704d-4b25-8b48-44f7a5ff0584", + "id" : "22041b6b-6d9e-43eb-8d2a-94a3052c49aa", "alias" : "reset credentials", "description" : "Reset credentials for a user if they forgot their password or something", "providerId" : "basic-flow", @@ -4617,7 +4755,7 @@ "userSetupAllowed" : false } ] }, { - "id" : 
"c9d37f8a-b04b-40d2-a1a3-bd0c78f4c1a8", + "id" : "153aaf25-b6d9-42b4-9740-f63c94c16626", "alias" : "saml ecp", "description" : "SAML ECP Profile Authentication Flow", "providerId" : "basic-flow", @@ -4633,13 +4771,13 @@ } ] } ], "authenticatorConfig" : [ { - "id" : "e7bb6227-7b03-4c6b-ae97-b72e69967be4", + "id" : "e0075b39-a2ad-47de-9ee6-e61073387e71", "alias" : "create unique user config", "config" : { "require.password.update.after.registration" : "false" } }, { - "id" : "5a856c63-1341-42c2-aa5b-24246324816b", + "id" : "aa24bff3-bd25-4b2a-973f-63fea5c21dd1", "alias" : "review profile config", "config" : { "update.profile.on.first.login" : "missing" diff --git a/spiffworkflow-backend/keycloak/test_user_lists/status b/spiffworkflow-backend/keycloak/test_user_lists/status index 6eb987b1..eb866ed7 100644 --- a/spiffworkflow-backend/keycloak/test_user_lists/status +++ b/spiffworkflow-backend/keycloak/test_user_lists/status @@ -17,6 +17,7 @@ core2.contributor@status.im,,156 core3.contributor@status.im,,157 core4.contributor@status.im,,158 core5.contributor@status.im,,159 +core6.contributor@status.im,core6.contributorx,199 core@status.im,,113 dao.project.lead@status.im desktop-a1.sme@status.im,,210 @@ -41,6 +42,7 @@ infra2.sme@status.im,,132 infra3.sme@status.im,,167 infra4.sme@status.im,,175 infra5.sme@status.im,,176 +infra6.sme@status.im,infra6.smex,212 jakub@status.im jamescheung@status.im,,234 jarrad@status.im @@ -53,6 +55,7 @@ legal2.sme@status.im,,165 legal3.sme@status.im,,166 legal4.sme@status.im,,177 legal5.sme@status.im,,178 +legal6.sme@status.im,legal6.smex,213 logos.program-lead@status.im,,160 manuchehr@status.im,,110 nimbus.program-lead@status.im,,161 @@ -63,6 +66,7 @@ peopleops.partner2.sme@status.im,,173 peopleops.partner3.sme@status.im,,174 peopleops.partner4.sme@status.im,,181 peopleops.partner5.sme@status.im,,182 +peopleops.partner6.sme@status.im,peopleops.partner6.smex,216 peopleops.partner@status.im,,150 peopleops.project-lead@status.im,peopleops.project-leadx,147 peopleops.talent.sme@status.im,,143 @@ -76,6 +80,7 @@ ppg.ba2.sme@status.im,,171 ppg.ba3.sme@status.im,,172 ppg.ba4.sme@status.im,,200 ppg.ba5.sme@status.im,,201 +ppg.ba6.sme@status.im,ppg.ba6.smex,236 ppg.ba@status.im,,127 sasha@status.im,,112 security-a1.sme@status.im,,206 @@ -86,5 +91,7 @@ security2.sme@status.im,,168 security3.sme@status.im,,169 security4.sme@status.im,,179 security5.sme@status.im,,180 +security6.sme@status.im,security6.smex,211 services.lead@status.im,,122 vac.program-lead@status.im,,163 +web.project-lead@status.im,web.project-leadx,235 From dba897f3bd90ab3d5f399b75f08f18a3a936a571 Mon Sep 17 00:00:00 2001 From: jasquat Date: Mon, 3 Apr 2023 13:46:33 -0400 Subject: [PATCH 154/162] when getting most recent tasks make sure to group by bpmn process guid as well w/ burnettk --- .../routes/process_instances_controller.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index 7bbec1ff..b17869ab 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -672,8 +672,10 @@ def process_instance_task_list( task_model_list = {} if most_recent_tasks_only: for task_model in task_models: - if task_model.bpmn_identifier not in task_model_list: - 
task_model_list[task_model.bpmn_identifier] = task_model + bpmn_process_guid = task_model.bpmn_process_direct_parent_guid or "TOP" + row_key = f"{bpmn_process_guid}:::{task_model.bpmn_identifier}" + if row_key not in task_model_list: + task_model_list[row_key] = task_model task_models = list(task_model_list.values()) if to_task_model is not None: From 9416bb3e68e2328f4ad6a5ff32481e025eced382 Mon Sep 17 00:00:00 2001 From: burnettk Date: Tue, 4 Apr 2023 01:17:02 -0400 Subject: [PATCH 155/162] update pp1 test --- spiffworkflow-frontend/cypress/pilot/pp1.cy.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spiffworkflow-frontend/cypress/pilot/pp1.cy.js b/spiffworkflow-frontend/cypress/pilot/pp1.cy.js index bcb2091f..b713e51f 100644 --- a/spiffworkflow-frontend/cypress/pilot/pp1.cy.js +++ b/spiffworkflow-frontend/cypress/pilot/pp1.cy.js @@ -68,7 +68,7 @@ describe('pp1', () => { .contains(/^Submit$/) .click(); - cy.contains('Task: Enter NDR Items', { timeout: 60000 }); + cy.contains('Task: Enter NDR-P Items', { timeout: 60000 }); cy.get('#root_0_sub_category').select('op_src'); cy.get('#root_0_item').clear().type('spiffworkflow'); cy.get('#root_0_qty').clear().type('1'); From 2befac4c2827f9da5b29663a18348366eefd1474 Mon Sep 17 00:00:00 2001 From: jasquat Date: Tue, 4 Apr 2023 09:13:56 -0400 Subject: [PATCH 156/162] notion 239: use onclick to submit form when saving form as draft to avoid form validations --- .../routes/tasks_controller.py | 3 +- .../services/process_instance_service.py | 25 ++++++++++------ spiffworkflow-frontend/.eslintrc.js | 7 ++++- .../src/routes/TaskShow.tsx | 30 +++++++++++++++---- 4 files changed, 49 insertions(+), 16 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py index 7c8973aa..9baffd25 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py @@ -428,8 +428,9 @@ def _task_submit_shared( if save_as_draft: task_model = _get_task_model_from_guid_or_raise(task_guid, process_instance_id) + ProcessInstanceService.update_form_task_data(processor, spiff_task, body, g.user) json_data_dict = TaskService.update_task_data_on_task_model_and_return_dict_if_updated( - task_model, body, "json_data_hash" + task_model, spiff_task.data, "json_data_hash" ) if json_data_dict is not None: TaskService.insert_or_update_json_data_dict(json_data_dict) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py index 39f6de15..b8754352 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py @@ -325,6 +325,21 @@ class ProcessInstanceService: cls.replace_file_data_with_digest_references(data, models) + @staticmethod + def update_form_task_data( + processor: ProcessInstanceProcessor, + spiff_task: SpiffTask, + data: dict[str, Any], + user: UserModel, + ) -> None: + AuthorizationService.assert_user_can_complete_spiff_task(processor.process_instance_model.id, spiff_task, user) + ProcessInstanceService.save_file_data_and_replace_with_digest_references( + data, + processor.process_instance_model.id, + ) + dot_dct = ProcessInstanceService.create_dot_dict(data) + spiff_task.update_data(dot_dct) 
+ @staticmethod def complete_form_task( processor: ProcessInstanceProcessor, @@ -338,15 +353,7 @@ class ProcessInstanceService: Abstracted here because we need to do it multiple times when completing all tasks in a multi-instance task. """ - AuthorizationService.assert_user_can_complete_spiff_task(processor.process_instance_model.id, spiff_task, user) - - ProcessInstanceService.save_file_data_and_replace_with_digest_references( - data, - processor.process_instance_model.id, - ) - - dot_dct = ProcessInstanceService.create_dot_dict(data) - spiff_task.update_data(dot_dct) + ProcessInstanceService.update_form_task_data(processor, spiff_task, data, user) # ProcessInstanceService.post_process_form(spiff_task) # some properties may update the data store. processor.complete_task(spiff_task, human_task, user=user) diff --git a/spiffworkflow-frontend/.eslintrc.js b/spiffworkflow-frontend/.eslintrc.js index b6829ff4..5ed20900 100644 --- a/spiffworkflow-frontend/.eslintrc.js +++ b/spiffworkflow-frontend/.eslintrc.js @@ -25,6 +25,10 @@ module.exports = { }, plugins: ['react', 'sonarjs', '@typescript-eslint'], rules: { + // according to https://github.com/typescript-eslint/typescript-eslint/issues/2621, You should turn off the eslint core rule and turn on the typescript-eslint rule + // but not sure which of the above "extends" statements is maybe bringing in eslint core + 'no-shadow': 'off', + '@typescript-eslint/no-shadow': ['error'], 'jest/expect-expect': 'off', 'react/jsx-no-bind': 'off', 'jsx-a11y/no-autofocus': 'off', @@ -37,7 +41,8 @@ module.exports = { 'react/react-in-jsx-scope': 'off', 'react/require-default-props': 'off', 'import/prefer-default-export': 'off', - 'no-unused-vars': [ + 'no-unused-vars': 'off', + '@typescript-eslint/no-unused-vars': [ 'error', { destructuredArrayIgnorePattern: '^_', diff --git a/spiffworkflow-frontend/src/routes/TaskShow.tsx b/spiffworkflow-frontend/src/routes/TaskShow.tsx index 5362cf32..d343d1a1 100644 --- a/spiffworkflow-frontend/src/routes/TaskShow.tsx +++ b/spiffworkflow-frontend/src/routes/TaskShow.tsx @@ -29,6 +29,11 @@ class UnexpectedHumanTaskType extends Error { } } +enum FormSubmitType { + Default, + Draft, +} + export default function TaskShow() { const [task, setTask] = useState(null); const [userTasks] = useState(null); @@ -36,6 +41,9 @@ export default function TaskShow() { const navigate = useNavigate(); const [disabled, setDisabled] = useState(false); + // save current form data so that we can avoid validations in certain situations + const [currentFormObject, setCurrentFormObject] = useState({}); + const { addError, removeError } = useAPIError(); // eslint-disable-next-line sonarjs/no-duplicate-string @@ -87,14 +95,16 @@ export default function TaskShow() { } }; - const handleFormSubmit = (formObject: any, event: any) => { + const handleFormSubmit = ( + formObject: any, + _event: any, + submitType: FormSubmitType = FormSubmitType.Default + ) => { if (disabled) { return; } - const submitButtonId = event.nativeEvent.submitter.id; let queryParams = ''; - console.log('submitButtonId', submitButtonId); - if (submitButtonId === 'save-as-draft-button') { + if (submitType === FormSubmitType.Draft) { queryParams = '?save_as_draft=true'; } setDisabled(true); @@ -200,6 +210,11 @@ export default function TaskShow() { return errors; }; + const updateFormData = (formObject: any) => { + currentFormObject.formData = formObject.formData; + setCurrentFormObject(currentFormObject); + }; + const formElement = () => { if (!task) { return null; @@ -250,10 +265,12 @@ 
export default function TaskShow() { } else if (task.type === 'User Task') { saveAsDraftButton = ( <Button id="save-as-draft-button" disabled={disabled} kind="secondary" + onClick={() => + handleFormSubmit(currentFormObject, null, FormSubmitType.Draft) + } > Save as draft @@ -287,7 +304,10 @@ export default function TaskShow() { schema={jsonSchema} uiSchema={formUiSchema} validator={validator} + onChange={updateFormData} customValidate={customValidate} + omitExtraData + liveOmit > {reactFragmentToHideSubmitButton} From 4bd63bac7809a1e5bcf262a6b7eb895afdd07235 Mon Sep 17 00:00:00 2001 From: Dan <Dan@users.noreply.github.com> Date: Tue, 4 Apr 2023 14:47:33 -0400 Subject: [PATCH 157/162] Fixing lane error in the spiff-bpmn-io library --- spiffworkflow-frontend/package-lock.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/spiffworkflow-frontend/package-lock.json b/spiffworkflow-frontend/package-lock.json index adf99f3b..45dd194e 100644 --- a/spiffworkflow-frontend/package-lock.json +++ b/spiffworkflow-frontend/package-lock.json @@ -8065,7 +8065,7 @@ }, "node_modules/bpmn-js-spiffworkflow": { "version": "0.0.8", - "resolved": "git+ssh://git@github.com/sartography/bpmn-js-spiffworkflow.git#82260144f90d9a311155066d637664d9e2a3f02e", + "resolved": "git+ssh://git@github.com/sartography/bpmn-js-spiffworkflow.git#2822ba1644acc8c8e8c055702b3bcade0f4573be", "license": "MIT", "dependencies": { "inherits": "^2.0.4", @@ -38214,7 +38214,7 @@ } }, "bpmn-js-spiffworkflow": { - "version": "git+ssh://git@github.com/sartography/bpmn-js-spiffworkflow.git#82260144f90d9a311155066d637664d9e2a3f02e", + "version": "git+ssh://git@github.com/sartography/bpmn-js-spiffworkflow.git#2822ba1644acc8c8e8c055702b3bcade0f4573be", "from": "bpmn-js-spiffworkflow@sartography/bpmn-js-spiffworkflow#main", "requires": { "inherits": "^2.0.4", From f9000d4dc540eb749bd1ded4969596694e125b63 Mon Sep 17 00:00:00 2001 From: jasquat <jasquat@users.noreply.github.com> Date: Tue, 4 Apr 2023 15:48:34 -0400 Subject: [PATCH 158/162] make sure we process the parents and children of failing spiff tasks as well since these do not go through the normal flow w/ burnettk --- .../routes/process_instances_controller.py | 12 +++++++----- .../services/workflow_execution_service.py | 9 ++------- .../src/routes/ProcessInstanceShow.tsx | 5 +++++ 3 files changed, 14 insertions(+), 12 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index b17869ab..432bd9c4 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -650,10 +650,12 @@ def process_instance_task_list( .add_columns( BpmnProcessDefinitionModel.bpmn_identifier.label("bpmn_process_definition_identifier"), # type: ignore BpmnProcessDefinitionModel.bpmn_name.label("bpmn_process_definition_name"), # type: ignore + bpmn_process_alias.guid.label("bpmn_process_guid"), + # not sure why we needed these + # direct_parent_bpmn_process_alias.guid.label("bpmn_process_direct_parent_guid"), + # direct_parent_bpmn_process_definition_alias.bpmn_identifier.label( + # "bpmn_process_direct_parent_bpmn_identifier" + # ), TaskDefinitionModel.bpmn_identifier, TaskDefinitionModel.bpmn_name, TaskDefinitionModel.typename, @@ -672,7 +674,7 @@ def process_instance_task_list( task_model_list = {} if most_recent_tasks_only: for task_model in task_models: -
bpmn_process_guid = task_model.bpmn_process_direct_parent_guid or "TOP" + bpmn_process_guid = task_model.bpmn_process_guid or "TOP" row_key = f"{bpmn_process_guid}:::{task_model.bpmn_identifier}" if row_key not in task_model_list: task_model_list[row_key] = task_model diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py index babff151..5398fff4 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py @@ -92,6 +92,8 @@ class TaskModelSavingDelegate(EngineStepDelegate): if hasattr(script_engine, "failing_spiff_task") and script_engine.failing_spiff_task is not None: failing_spiff_task = script_engine.failing_spiff_task self.task_service.update_task_model_with_spiff_task(failing_spiff_task, task_failed=True) + self.task_service.process_spiff_task_parent_subprocess_tasks(failing_spiff_task) + self.task_service.process_spiff_task_children(failing_spiff_task) self.task_service.save_objects_to_database() @@ -101,13 +103,6 @@ class TaskModelSavingDelegate(EngineStepDelegate): def after_engine_steps(self, bpmn_process_instance: BpmnWorkflow) -> None: if self._should_update_task_model(): - # TODO: also include children of the last task processed. This may help with task resets - # if we have to set their states to FUTURE. - # excludes FUTURE and COMPLETED. the others were required to get PP1 to go to completion. - # for waiting_spiff_task in bpmn_process_instance.get_tasks( - # TaskState.WAITING | TaskState.CANCELLED | TaskState.READY | TaskState.MAYBE | TaskState.LIKELY - # ): - # self._update_task_model_with_spiff_task(waiting_spiff_task) if self.last_completed_spiff_task is not None: self.task_service.process_spiff_task_parent_subprocess_tasks(self.last_completed_spiff_task) self.task_service.process_spiff_task_children(self.last_completed_spiff_task) diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx index 7ae4d4ce..29d4bedc 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx @@ -998,6 +998,11 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { ): {taskToUse.state} {taskDisplayButtons(taskToUse)} +
+ + Guid: {taskToUse.guid} + +
{taskToUse.state === 'COMPLETED' ? (
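A minimal standalone sketch of the "most recent tasks only" de-duplication used in the patch above: one task row is kept per (bpmn process guid, task bpmn identifier) pair, relying on the query returning rows newest-first. The TaskRow dataclass, the sample ordering, and the function name are hypothetical stand-ins for illustration, not code from the patch.

    from dataclasses import dataclass
    from typing import Optional

    @dataclass
    class TaskRow:
        # hypothetical stand-in for the rows returned by the task list query
        bpmn_process_guid: Optional[str]
        bpmn_identifier: str

    def most_recent_tasks_only(task_rows: list[TaskRow]) -> list[TaskRow]:
        # Rows are assumed ordered newest-first, so the first row seen wins for
        # each (bpmn process guid, task identifier) pair. "TOP" stands in for
        # the top-level process, which has no bpmn process guid of its own.
        seen: dict[str, TaskRow] = {}
        for row in task_rows:
            row_key = f"{row.bpmn_process_guid or 'TOP'}:::{row.bpmn_identifier}"
            if row_key not in seen:
                seen[row_key] = row
        return list(seen.values())

Grouping by the composite key rather than the bare task identifier is what keeps tasks with the same identifier in different subprocess instances from collapsing into a single row.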
From 9514f84fe109a8df246eef9445a3f77e0394c69a Mon Sep 17 00:00:00 2001 From: Dan Date: Tue, 4 Apr 2023 17:18:11 -0400 Subject: [PATCH 159/162] revert recent updates to bpmn-js --- spiffworkflow-frontend/package-lock.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/spiffworkflow-frontend/package-lock.json b/spiffworkflow-frontend/package-lock.json index 45dd194e..027eec9f 100644 --- a/spiffworkflow-frontend/package-lock.json +++ b/spiffworkflow-frontend/package-lock.json @@ -8065,7 +8065,7 @@ }, "node_modules/bpmn-js-spiffworkflow": { "version": "0.0.8", - "resolved": "git+ssh://git@github.com/sartography/bpmn-js-spiffworkflow.git#2822ba1644acc8c8e8c055702b3bcade0f4573be", + "resolved": "git+ssh://git@github.com/sartography/bpmn-js-spiffworkflow.git#1fa1f8741605ae1e1cd6d645d07e03953943c8cc", "license": "MIT", "dependencies": { "inherits": "^2.0.4", @@ -38214,7 +38214,7 @@ } }, "bpmn-js-spiffworkflow": { - "version": "git+ssh://git@github.com/sartography/bpmn-js-spiffworkflow.git#2822ba1644acc8c8e8c055702b3bcade0f4573be", + "version": "git+ssh://git@github.com/sartography/bpmn-js-spiffworkflow.git#1fa1f8741605ae1e1cd6d645d07e03953943c8cc", "from": "bpmn-js-spiffworkflow@sartography/bpmn-js-spiffworkflow#main", "requires": { "inherits": "^2.0.4", From b6b3662aa8dd431db3547a1842b93517889c7ecd Mon Sep 17 00:00:00 2001 From: Dan Date: Wed, 5 Apr 2023 11:14:20 -0400 Subject: [PATCH 160/162] Fix for lanes in bpmn-js --- spiffworkflow-frontend/package-lock.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/spiffworkflow-frontend/package-lock.json b/spiffworkflow-frontend/package-lock.json index 027eec9f..96fd9aa6 100644 --- a/spiffworkflow-frontend/package-lock.json +++ b/spiffworkflow-frontend/package-lock.json @@ -8065,7 +8065,7 @@ }, "node_modules/bpmn-js-spiffworkflow": { "version": "0.0.8", - "resolved": "git+ssh://git@github.com/sartography/bpmn-js-spiffworkflow.git#1fa1f8741605ae1e1cd6d645d07e03953943c8cc", + "resolved": "git+ssh://git@github.com/sartography/bpmn-js-spiffworkflow.git#6391337a86a6342fcfcfcd3e57d39cb61c7af668", "license": "MIT", "dependencies": { "inherits": "^2.0.4", @@ -38214,7 +38214,7 @@ } }, "bpmn-js-spiffworkflow": { - "version": "git+ssh://git@github.com/sartography/bpmn-js-spiffworkflow.git#1fa1f8741605ae1e1cd6d645d07e03953943c8cc", + "version": "git+ssh://git@github.com/sartography/bpmn-js-spiffworkflow.git#6391337a86a6342fcfcfcd3e57d39cb61c7af668", "from": "bpmn-js-spiffworkflow@sartography/bpmn-js-spiffworkflow#main", "requires": { "inherits": "^2.0.4", From 10e665ac481dc7874ab7bdaa87882a0ea27b2455 Mon Sep 17 00:00:00 2001 From: jbirddog <100367399+jbirddog@users.noreply.github.com> Date: Wed, 5 Apr 2023 14:27:20 -0400 Subject: [PATCH 161/162] Type ahead widget (#205) --- .gitignore | 1 + .../src/spiffworkflow_backend/api.yml | 31 ++++++++ .../spiffworkflow_backend/config/default.py | 4 + .../routes/connector_proxy_controller.py | 25 +++++++ .../src/routes/TaskShow.tsx | 73 ++++++++++++++++++- 5 files changed, 133 insertions(+), 1 deletion(-) create mode 100644 spiffworkflow-backend/src/spiffworkflow_backend/routes/connector_proxy_controller.py diff --git a/.gitignore b/.gitignore index 24a0ada5..22f7178f 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,6 @@ pyrightconfig.json .idea/ t +*~ .dccache *~ \ No newline at end of file diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml index 373f1831..7b97781e 100755 --- 
a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml @@ -2089,6 +2089,37 @@ paths: + /connector-proxy/type-ahead/{category}: + parameters: + - name: category + in: path + required: true + description: The category for the type-ahead search + schema: + type: string + - name: prefix + in: query + required: true + description: The prefix to search for + schema: + type: string + - name: limit + in: query + required: true + description: The maximum number of search results + schema: + type: integer + get: + operationId: spiffworkflow_backend.routes.connector_proxy_controller.type_ahead + summary: Return type ahead search results + tags: + - Type Ahead + responses: + "200": + description: We return type ahead search results + #content: + # - application/json + components: securitySchemes: jwt: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py index 4ba0efd9..dec4c444 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py @@ -41,6 +41,10 @@ SPIFFWORKFLOW_BACKEND_URL = environ.get("SPIFFWORKFLOW_BACKEND_URL", default="ht SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL = environ.get( "SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL", default="http://localhost:7004" ) +SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_TYPE_AHEAD_URL = environ.get( + "SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_TYPE_AHEAD_URL", + default="https://emehvlxpwodjawtgi7ctkbvpse0vmaow.lambda-url.us-east-1.on.aws", +) # Open ID server # use "http://localhost:7000/openid" for running with simple openid diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/connector_proxy_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/connector_proxy_controller.py new file mode 100644 index 00000000..45c0bd28 --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/connector_proxy_controller.py @@ -0,0 +1,25 @@ +from typing import Any + +import flask.wrappers +import requests +from flask import current_app +from flask.wrappers import Response + + +def connector_proxy_type_ahead_url() -> Any: + """Returns the connector proxy type ahead url.""" + return current_app.config["SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_TYPE_AHEAD_URL"] + + +def type_ahead(category: str, prefix: str, limit: int) -> flask.wrappers.Response: + url = f"{connector_proxy_type_ahead_url()}/v1/type-ahead/{category}?prefix={prefix}&limit={limit}" + + proxy_response = requests.get(url) + status = proxy_response.status_code + if status // 100 == 2: + response = proxy_response.text + else: + # suppress pop-up errors on the client + status = 200 + response = "[]" + return Response(response, status=status, mimetype="application/json") diff --git a/spiffworkflow-frontend/src/routes/TaskShow.tsx b/spiffworkflow-frontend/src/routes/TaskShow.tsx index d343d1a1..863ee5f3 100644 --- a/spiffworkflow-frontend/src/routes/TaskShow.tsx +++ b/spiffworkflow-frontend/src/routes/TaskShow.tsx @@ -1,4 +1,4 @@ -import React, { useEffect, useState } from 'react'; +import React, { useEffect, useRef, useState } from 'react'; import { useNavigate, useParams } from 'react-router-dom'; import validator from '@rjsf/validator-ajv8'; @@ -8,6 +8,7 @@ import { Tabs, Grid, Column, + ComboBox, Button, ButtonSet, // @ts-ignore @@ -22,6 +23,73 @@ import { modifyProcessIdentifierForPathParam }
from '../helpers'; import { ProcessInstanceTask } from '../interfaces'; import ProcessBreadcrumb from '../components/ProcessBreadcrumb'; +// TODO: move this somewhere else +function TypeAheadWidget({ + id, + onChange, + options: { category, itemFormat }, +}: { + id: string; + onChange: any; + options: any; +}) { + const pathForCategory = (inputText: string) => { + return `/connector-proxy/type-ahead/${category}?prefix=${inputText}&limit=100`; + }; + + const lastSearchTerm = useRef(''); + const [items, setItems] = useState([]); + const [selectedItem, setSelectedItem] = useState(null); + const itemFormatRegex = /[^{}]+(?=})/g; + const itemFormatSubstitutions = itemFormat.match(itemFormatRegex); + + const itemToString = (item: any) => { + if (!item) { + return null; + } + + let str = itemFormat; + itemFormatSubstitutions.forEach((key: string) => { + str = str.replace(`{${key}}`, item[key]); + }); + return str; + }; + + const handleTypeAheadResult = (result: any, inputText: string) => { + if (lastSearchTerm.current === inputText) { + setItems(result); + } + }; + + const typeAheadSearch = (inputText: string) => { + if (inputText) { + lastSearchTerm.current = inputText; + // TODO: check cache of prefixes -> results + HttpService.makeCallToBackend({ + path: pathForCategory(inputText), + successCallback: (result: any) => + handleTypeAheadResult(result, inputText), + }); + } + }; + + return ( + { + setSelectedItem(event.selectedItem); + onChange(itemToString(event.selectedItem)); + }} + id={id} + items={items} + itemToString={itemToString} + placeholder={`Start typing to search for ${category}...`} + titleText={`Type ahead search for ${category}`} + selectedItem={selectedItem} + /> + ); +} + class UnexpectedHumanTaskType extends Error { constructor(message: string) { super(message); @@ -294,6 +362,8 @@ export default function TaskShow() { return getFieldsWithDateValidations(jsonSchema, formData, errors); }; + const widgets = { typeAhead: TypeAheadWidget }; + return ( @@ -303,6 +373,7 @@ export default function TaskShow() { onSubmit={handleFormSubmit} schema={jsonSchema} uiSchema={formUiSchema} + widgets={widgets} validator={validator} onChange={updateFormData} customValidate={customValidate} From 21819642cefa7c7368c8d399491872ed74e14639 Mon Sep 17 00:00:00 2001 From: burnettk Date: Wed, 5 Apr 2023 20:20:08 -0400 Subject: [PATCH 162/162] example curl for message start event --- .../src/spiffworkflow_backend/routes/messages_controller.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/messages_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/messages_controller.py index 7cd65a37..f7db74db 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/messages_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/messages_controller.py @@ -59,6 +59,12 @@ def message_instance_list( # payload: dict, # process_instance_id: Optional[int], # } +# +# For example: +# curl 'http://localhost:7000/v1.0/messages/gogo' \ +# -H 'authorization: Bearer [FIXME]' \ +# -H 'content-type: application/json' \ +# --data-raw '{"payload":{"sure": "yes", "food": "spicy"}}' def message_send( message_name: str, body: Dict[str, Any],
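The example curl above translates directly to Python; a minimal sketch using the requests library, where the host, message name, bearer token, and payload are all placeholders copied from the comment rather than real values:

    import requests

    # Send the "gogo" message to the backend, as in the curl example above.
    response = requests.post(
        "http://localhost:7000/v1.0/messages/gogo",
        headers={
            "Authorization": "Bearer [FIXME]",
            "Content-Type": "application/json",
        },
        json={"payload": {"sure": "yes", "food": "spicy"}},
    )
    response.raise_for_status()
    print(response.json())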