diff --git a/.github/workflows/backend_tests.yml b/.github/workflows/backend_tests.yml
index c9c6cbe9f..9e7cb8212 100644
--- a/.github/workflows/backend_tests.yml
+++ b/.github/workflows/backend_tests.yml
@@ -276,7 +276,7 @@ jobs:
         uses: codecov/codecov-action@v3.1.3

      - name: SonarCloud Scan
-        uses: sonarsource/sonarcloud-github-action@v1.8
+        uses: sonarsource/sonarcloud-github-action@v1.9
        # thought about just skipping dependabot
        # if: ${{ github.actor != 'dependabot[bot]' }}
        # but figured all pull requests seems better, since none of them will have access to sonarcloud.
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py b/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py
index 923ff1f54..7c4ef2058 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py
@@ -21,10 +21,13 @@ from SpiffWorkflow.exceptions import WorkflowTaskException
 from SpiffWorkflow.specs.base import TaskSpec  # type: ignore
 from SpiffWorkflow.task import Task  # type: ignore

+from spiffworkflow_backend.models.task import TaskModel  # noqa: F401
 from spiffworkflow_backend.services.authentication_service import NotAuthorizedError
 from spiffworkflow_backend.services.authentication_service import TokenInvalidError
 from spiffworkflow_backend.services.authentication_service import TokenNotProvidedError
 from spiffworkflow_backend.services.authentication_service import UserNotLoggedInError
+from spiffworkflow_backend.services.task_service import TaskModelException
+from spiffworkflow_backend.services.task_service import TaskService

 api_error_blueprint = Blueprint("api_error_blueprint", __name__)

@@ -36,17 +39,17 @@ class ApiError(Exception):
     error_code: str
     message: str
-    error_line: str = ""
-    error_type: str = ""
-    file_name: str = ""
-    line_number: int = 0
-    offset: int = 0
+    error_line: str | None = ""
+    error_type: str | None = ""
+    file_name: str | None = ""
+    line_number: int | None = 0
+    offset: int | None = 0
     sentry_link: str | None = None
-    status_code: int = 400
-    tag: str = ""
+    status_code: int | None = 400
+    tag: str | None = ""
     task_data: dict | str | None = field(default_factory=dict)
-    task_id: str = ""
-    task_name: str = ""
+    task_id: str | None = ""
+    task_name: str | None = ""
     task_trace: list | None = field(default_factory=list)

     def __str__(self) -> str:
@@ -96,6 +99,44 @@ class ApiError(Exception):

         return instance

+    @classmethod
+    def from_task_model(
+        cls,
+        error_code: str,
+        message: str,
+        task_model: TaskModel,
+        status_code: int | None = 400,
+        line_number: int | None = 0,
+        offset: int | None = 0,
+        error_type: str | None = "",
+        error_line: str | None = "",
+        task_trace: list | None = None,
+    ) -> ApiError:
+        """Constructs an API Error with details pulled from the current task model."""
+        instance = cls(error_code, message, status_code=status_code)
+        task_definition = task_model.task_definition
+        instance.task_id = task_definition.bpmn_identifier
+        instance.task_name = task_definition.bpmn_name or ""
+        instance.line_number = line_number
+        instance.offset = offset
+        instance.error_type = error_type
+        instance.error_line = error_line
+        if task_trace:
+            instance.task_trace = task_trace
+        else:
+            instance.task_trace = TaskModelException.get_task_trace(task_model)
+
+        try:
+            spec_reference = TaskService.get_spec_reference_from_bpmn_process(task_model.bpmn_process)
+            instance.file_name = spec_reference.file_name
+        except Exception as exception:
+            current_app.logger.error(exception)
+
+        # Assure that there is nothing in the json data that can't be serialized.
+        instance.task_data = ApiError.remove_unserializeable_from_dict(task_model.get_data())
+
+        return instance
+
     @staticmethod
     def remove_unserializeable_from_dict(my_dict: dict) -> dict:
         """Removes unserializeable from dict."""
@@ -157,6 +198,18 @@ class ApiError(Exception):
             error_line=exp.error_line,
             task_trace=exp.task_trace,
         )
+        elif isinstance(exp, TaskModelException):
+            # Note that WorkflowDataExceptions are also WorkflowTaskExceptions
+            return ApiError.from_task_model(
+                error_code,
+                message + ". " + str(exp),
+                exp.task_model,
+                line_number=exp.line_number,
+                offset=exp.offset,
+                error_type=exp.error_type,
+                error_line=exp.error_line,
+                task_trace=exp.task_trace,
+            )
         elif isinstance(exp, WorkflowException) and exp.task_spec:
             msg = message + ". " + str(exp)
             return ApiError.from_task_spec(error_code, msg, exp.task_spec)
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py
index 9fb8d6ffa..773ed2406 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py
@@ -76,6 +76,19 @@ class TaskModel(SpiffworkflowBaseDBModel):

     data: Optional[dict] = None

+    # these are here to be compatible with task api
+    form_schema: Optional[dict] = None
+    form_ui_schema: Optional[dict] = None
+    process_model_display_name: Optional[str] = None
+    process_model_identifier: Optional[str] = None
+    typename: Optional[str] = None
+    can_complete: Optional[bool] = None
+    extensions: Optional[dict] = None
+    name_for_display: Optional[str] = None
+
+    def get_data(self) -> dict:
+        return {**self.python_env_data(), **self.json_data()}
+
     def python_env_data(self) -> dict:
         return JsonDataModel.find_data_dict_by_hash(self.python_env_data_hash)
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py
index 9a1796ff1..763917088 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py
@@ -43,7 +43,6 @@ from spiffworkflow_backend.models.process_instance import (
 )
 from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType
 from spiffworkflow_backend.models.process_model import ProcessModelInfo
-from spiffworkflow_backend.models.task import Task
 from spiffworkflow_backend.models.task import TaskModel  # noqa: F401
 from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.routes.process_api_blueprint import (
@@ -54,6 +53,8 @@ from spiffworkflow_backend.routes.process_api_blueprint import (
 )
 from spiffworkflow_backend.routes.process_api_blueprint import _get_process_model
 from spiffworkflow_backend.services.authorization_service import AuthorizationService
+from spiffworkflow_backend.services.authorization_service import HumanTaskNotFoundError
+from spiffworkflow_backend.services.authorization_service import UserDoesNotHaveAccessToTaskError
 from spiffworkflow_backend.services.file_system_service import FileSystemService
 from spiffworkflow_backend.services.process_instance_processor import (
     ProcessInstanceProcessor,
@@ -64,8 +65,10 @@ from spiffworkflow_backend.services.process_instance_queue_service import (
 from spiffworkflow_backend.services.process_instance_service import (
     ProcessInstanceService,
 )
+from spiffworkflow_backend.services.process_instance_tmp_service import ProcessInstanceTmpService
 from spiffworkflow_backend.services.process_model_service import ProcessModelService
 from spiffworkflow_backend.services.spec_file_service import SpecFileService
+from spiffworkflow_backend.services.task_service import TaskModelException
 from spiffworkflow_backend.services.task_service import TaskService


@@ -217,7 +220,7 @@ def task_data_update(
     )
     if json_data_dict is not None:
         TaskService.insert_or_update_json_data_records({json_data_dict["hash"]: json_data_dict})
-    TaskService.add_event_to_process_instance(
+    ProcessInstanceTmpService.add_event_to_process_instance(
         process_instance, ProcessInstanceEventType.task_data_edited.value, task_guid=task_guid
     )
     try:
@@ -265,7 +268,6 @@ def manual_complete_task(


 def task_show(process_instance_id: int, task_guid: str = "next") -> flask.wrappers.Response:
-    """Task_show."""
     process_instance = _find_process_instance_by_id_or_raise(process_instance_id)

     if process_instance.status == ProcessInstanceStatus.suspended.value:
@@ -279,17 +281,12 @@ def task_show(process_instance_id: int, task_guid: str = "next") -> flask.wrappe
         process_instance.process_model_identifier,
     )

-    # _find_human_task_or_raise(process_instance_id, task_guid)
-
     form_schema_file_name = ""
     form_ui_schema_file_name = ""
-    processor = ProcessInstanceProcessor(process_instance)
-    if task_guid == "next":
-        spiff_task = processor.next_task()
-        task_guid = spiff_task.id
-    else:
-        spiff_task = _get_spiff_task_from_process_instance(task_guid, process_instance, processor=processor)
-    extensions = spiff_task.task_spec.extensions
+
+    task_model = _get_task_model_from_guid_or_raise(task_guid, process_instance_id)
+    task_definition = task_model.task_definition
+    extensions = TaskService.get_extensions_from_task_model(task_model)

     if "properties" in extensions:
         properties = extensions["properties"]
@@ -297,25 +294,40 @@ def task_show(process_instance_id: int, task_guid: str = "next") -> flask.wrappe
             form_schema_file_name = properties["formJsonSchemaFilename"]
         if "formUiSchemaFilename" in properties:
             form_ui_schema_file_name = properties["formUiSchemaFilename"]
-    task = ProcessInstanceService.spiff_task_to_api_task(processor, spiff_task)
-    task.data = spiff_task.data
-    task.process_model_display_name = process_model.display_name
-    task.process_model_identifier = process_model.id
+
+    can_complete = False
+    try:
+        AuthorizationService.assert_user_can_complete_task(
+            process_instance.id, task_definition.bpmn_identifier, g.user
+        )
+        can_complete = True
+    except HumanTaskNotFoundError:
+        can_complete = False
+    except UserDoesNotHaveAccessToTaskError:
+        can_complete = False
+
+    task_model.data = task_model.get_data()
+    task_model.process_model_display_name = process_model.display_name
+    task_model.process_model_identifier = process_model.id
+    task_model.typename = task_definition.typename
+    task_model.can_complete = can_complete
+    task_process_identifier = task_model.bpmn_process.bpmn_process_definition.bpmn_identifier
+    task_model.name_for_display = TaskService.get_name_for_display(task_definition)

     process_model_with_form = process_model
     refs = SpecFileService.get_references_for_process(process_model_with_form)
     all_processes = [i.identifier for i in refs]
-    if task.process_identifier not in all_processes:
-        top_process_name = processor.find_process_model_process_name_by_task_name(task.process_identifier)
+    if task_process_identifier not in all_processes:
+        top_bpmn_process = TaskService.bpmn_process_for_called_activity_or_top_level_process(task_model)
         bpmn_file_full_path = ProcessInstanceProcessor.bpmn_file_full_path_from_bpmn_process_identifier(
-            top_process_name
+            top_bpmn_process.bpmn_process_definition.bpmn_identifier
         )
         relative_path = os.path.relpath(bpmn_file_full_path, start=FileSystemService.root_path())
         process_model_relative_path = os.path.dirname(relative_path)
         process_model_with_form = ProcessModelService.get_process_model_from_relative_path(process_model_relative_path)

-    if task.type == "User Task":
+    if task_definition.typename == "UserTask":
         if not form_schema_file_name:
             raise (
                 ApiError(
@@ -330,79 +342,60 @@ def task_show(process_instance_id: int, task_guid: str = "next") -> flask.wrappe

         form_dict = _prepare_form_data(
             form_schema_file_name,
-            spiff_task,
+            task_model,
             process_model_with_form,
         )

-        if task.data:
-            _update_form_schema_with_task_data_as_needed(form_dict, task, spiff_task)
+        if task_model.data:
+            _update_form_schema_with_task_data_as_needed(form_dict, task_model)

         if form_dict:
-            task.form_schema = form_dict
+            task_model.form_schema = form_dict

         if form_ui_schema_file_name:
             ui_form_contents = _prepare_form_data(
                 form_ui_schema_file_name,
-                task,
+                task_model,
                 process_model_with_form,
             )
             if ui_form_contents:
-                task.form_ui_schema = ui_form_contents
+                task_model.form_ui_schema = ui_form_contents

-    _munge_form_ui_schema_based_on_hidden_fields_in_task_data(task)
-    _render_instructions_for_end_user(spiff_task, task)
-    return make_response(jsonify(task), 200)
+    _munge_form_ui_schema_based_on_hidden_fields_in_task_data(task_model)
+    _render_instructions_for_end_user(task_model, extensions)
+    task_model.extensions = extensions
+    return make_response(jsonify(task_model), 200)


-def _render_instructions_for_end_user(spiff_task: SpiffTask, task: Task) -> str:
+def _render_instructions_for_end_user(task_model: TaskModel, extensions: Optional[dict] = None) -> str:
     """Assure any instructions for end user are processed for jinja syntax."""
-    if task.properties and "instructionsForEndUser" in task.properties:
-        if task.properties["instructionsForEndUser"]:
+    if extensions is None:
+        extensions = TaskService.get_extensions_from_task_model(task_model)
+    if extensions and "instructionsForEndUser" in extensions:
+        if extensions["instructionsForEndUser"]:
             try:
-                instructions = _render_jinja_template(task.properties["instructionsForEndUser"], spiff_task)
-                task.properties["instructionsForEndUser"] = instructions
+                instructions = _render_jinja_template(extensions["instructionsForEndUser"], task_model)
+                extensions["instructionsForEndUser"] = instructions
                 return instructions
-            except WorkflowTaskException as wfe:
+            except TaskModelException as wfe:
                 wfe.add_note("Failed to render instructions for end user.")
                 raise ApiError.from_workflow_exception("instructions_error", str(wfe), exp=wfe) from wfe
     return ""


-def process_data_show(
-    process_instance_id: int,
-    process_data_identifier: str,
-    modified_process_model_identifier: str,
-) -> flask.wrappers.Response:
-    """Process_data_show."""
-    process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
-    processor = ProcessInstanceProcessor(process_instance)
-    all_process_data = processor.get_data()
-    process_data_value = None
-    if process_data_identifier in all_process_data:
-        process_data_value = all_process_data[process_data_identifier]
-
-    return make_response(
-        jsonify(
-            {
-                "process_data_identifier": process_data_identifier,
-                "process_data_value": process_data_value,
-            }
-        ),
-        200,
-    )
-
-
-def _interstitial_stream(process_instance_id: int) -> Generator[str, Optional[str], None]:
-    process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
+def _interstitial_stream(process_instance: ProcessInstanceModel) -> Generator[str, Optional[str], None]:
     processor = ProcessInstanceProcessor(process_instance)
     reported_ids = []  # bit of an issue with end tasks showing as getting completed twice.
     spiff_task = processor.next_task()
+    task_model = TaskModel.query.filter_by(guid=str(spiff_task.id)).first()
     last_task = None
     while last_task != spiff_task:
         task = ProcessInstanceService.spiff_task_to_api_task(processor, processor.next_task())
-        instructions = _render_instructions_for_end_user(spiff_task, task)
+        extensions = TaskService.get_extensions_from_task_model(task_model)
+        instructions = _render_instructions_for_end_user(task_model, extensions)
         if instructions and spiff_task.id not in reported_ids:
             reported_ids.append(spiff_task.id)
+            task.properties = extensions
             yield f"data: {current_app.json.dumps(task)} \n\n"
         last_task = spiff_task
         try:
@@ -425,6 +418,7 @@ def _interstitial_stream(process_instance_id: int) -> Generator[str, Optional[st
         # Note, this has to be done in case someone leaves the page,
         # which can otherwise cancel this function and leave completed tasks un-registered.
         spiff_task = processor.next_task()
+        task_model = TaskModel.query.filter_by(guid=str(spiff_task.id)).first()

     # Always provide some response, in the event no instructions were provided.
     if len(reported_ids) == 0:
@@ -432,10 +426,16 @@ def _interstitial_stream(process_instance_id: int) -> Generator[str, Optional[st
         yield f"data: {current_app.json.dumps(task)} \n\n"


+def _dequeued_interstitial_stream(process_instance_id: int) -> Generator[str, Optional[str], None]:
+    process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
+    with ProcessInstanceQueueService.dequeued(process_instance):
+        yield from _interstitial_stream(process_instance)
+
+
 def interstitial(process_instance_id: int) -> Response:
     """A Server Side Events Stream for watching the execution of engine tasks."""
     return Response(
-        stream_with_context(_interstitial_stream(process_instance_id)),
+        stream_with_context(_dequeued_interstitial_stream(process_instance_id)),
         mimetype="text/event-stream",
         headers={"X-Accel-Buffering": "no"},
     )
@@ -461,7 +461,7 @@ def _task_submit_shared(

     processor = ProcessInstanceProcessor(process_instance)
     spiff_task = _get_spiff_task_from_process_instance(task_guid, process_instance, processor=processor)
-    AuthorizationService.assert_user_can_complete_spiff_task(process_instance.id, spiff_task, principal.user)
+    AuthorizationService.assert_user_can_complete_task(process_instance.id, spiff_task.task_spec.name, principal.user)

     if spiff_task.state != TaskState.READY:
         raise (
@@ -649,14 +649,14 @@ def _get_tasks(
     return make_response(jsonify(response_json), 200)


-def _prepare_form_data(form_file: str, spiff_task: SpiffTask, process_model: ProcessModelInfo) -> dict:
+def _prepare_form_data(form_file: str, task_model: TaskModel, process_model: ProcessModelInfo) -> dict:
     """Prepare_form_data."""
-    if spiff_task.data is None:
+    if task_model.data is None:
         return {}

     file_contents = SpecFileService.get_data(process_model, form_file).decode("utf-8")
     try:
-        form_contents = _render_jinja_template(file_contents, spiff_task)
+        form_contents = _render_jinja_template(file_contents, task_model)
         try:
             # form_contents is a str
             hot_dict: dict = json.loads(form_contents)
@@ -669,21 +669,21 @@ def _prepare_form_data(form_file: str, spiff_task: SpiffTask, process_model: Pro
                     status_code=400,
                 )
             ) from exception
-    except WorkflowTaskException as wfe:
+    except TaskModelException as wfe:
         wfe.add_note(f"Error in Json Form File '{form_file}'")
         api_error = ApiError.from_workflow_exception("instructions_error", str(wfe), exp=wfe)
         api_error.file_name = form_file
         raise api_error


-def _render_jinja_template(unprocessed_template: str, spiff_task: SpiffTask) -> str:
+def _render_jinja_template(unprocessed_template: str, task_model: TaskModel) -> str:
     """Render_jinja_template."""
     jinja_environment = jinja2.Environment(autoescape=True, lstrip_blocks=True, trim_blocks=True)
     try:
         template = jinja_environment.from_string(unprocessed_template)
-        return template.render(**spiff_task.data)
+        return template.render(**(task_model.get_data()))
     except jinja2.exceptions.TemplateError as template_error:
-        wfe = WorkflowTaskException(str(template_error), task=spiff_task, exception=template_error)
+        wfe = TaskModelException(str(template_error), task_model=task_model, exception=template_error)
         if isinstance(template_error, TemplateSyntaxError):
             wfe.line_number = template_error.lineno
             wfe.error_line = template_error.source.split("\n")[template_error.lineno - 1]
@@ -691,7 +691,7 @@ def _render_jinja_template(unprocessed_template: str, spiff_task: SpiffTask) ->
         raise wfe from template_error
     except Exception as error:
         _type, _value, tb = exc_info()
-        wfe = WorkflowTaskException(str(error), task=spiff_task, exception=error)
+        wfe = TaskModelException(str(error), task_model=task_model, exception=error)
         while tb:
             if tb.tb_frame.f_code.co_filename == "