precommit is passing w/ burnettk

parent 2cbf2a2129
commit b798ea4e45
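The hunks below are pure reformatting of the kind a black-style formatter produces when it runs as a pre-commit hook (the exact hook set is an assumption; the commit message only says pre-commit now passes): statements that fit within the default 88-character limit are collapsed onto one line, and statements that exceed it are exploded with one element per line. A minimal sketch of that rule using black's Python API, with hypothetical input strings taken from the shapes seen in this diff:

import black

fits = (
    "def process_model_show(\n"
    "    process_group_id: str, process_model_id: str\n"
    ") -> Any:\n"
    "    pass\n"
)
too_long = (
    "def process_instance_delete(_process_group_id: str, _process_model_id: str,"
    " process_instance_id: int) -> flask.wrappers.Response:\n"
    "    pass\n"
)

mode = black.Mode()  # defaults to an 88-character line length
print(black.format_str(fits, mode=mode))      # collapsed: the signature fits on one line
print(black.format_str(too_long, mode=mode))  # exploded: parameters moved to their own line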
@@ -2,7 +2,6 @@
import json
from typing import Any
from typing import Dict
from typing import List
from typing import Optional
from typing import Union
@@ -13,7 +12,6 @@ from flask import g
from flask import Response
from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db
from werkzeug.datastructures import FileStorage

from spiffworkflow_backend.exceptions.process_entity_not_found_error import (
    ProcessEntityNotFoundError,
@@ -164,9 +162,7 @@ def process_model_update(
    return ProcessModelInfoSchema().dump(process_model)


-def process_model_show(
-    process_group_id: str, process_model_id: str
-) -> Any:
+def process_model_show(process_group_id: str, process_model_id: str) -> Any:
    """Process_model_show."""
    process_model = get_process_model(process_model_id, process_group_id)
    files = sorted(SpecFileService.get_files(process_model))
@@ -199,9 +195,7 @@ def process_model_list(
    return Response(json.dumps(response_json), status=200, mimetype="application/json")


-def get_file(
-    process_group_id: str, process_model_id: str, file_name: str
-) -> Any:
+def get_file(process_group_id: str, process_model_id: str, file_name: str) -> Any:
    """Get_file."""
    process_model = get_process_model(process_model_id, process_group_id)
    files = SpecFileService.get_files(process_model, file_name)
@@ -337,7 +331,10 @@ def process_instance_list(
    )

    # this can never happen. obviously the class has the columns it defines. this is just to appease mypy.
-    if ProcessInstanceModel.start_in_seconds is None or ProcessInstanceModel.end_in_seconds is None:
+    if (
+        ProcessInstanceModel.start_in_seconds is None
+        or ProcessInstanceModel.end_in_seconds is None
+    ):
        raise (
            ApiError(
                code="unexpected_condition",
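The inline comment in the hunk above carries the only reasoning in this diff: mypy sees the model's column attributes as possibly None at the class level, so the code narrows them with a check that can never fire at runtime. A hypothetical, self-contained sketch of that pattern (the model class, attribute types, and exception here are stand-ins, not the project's real ProcessInstanceModel or ApiError):

from typing import Optional
from typing import Tuple


class InstanceModelSketch:
    """Stand-in for a SQLAlchemy model whose columns mypy sees as Optional."""

    start_in_seconds: Optional[int] = 0
    end_in_seconds: Optional[int] = 0


def order_columns() -> Tuple[int, int]:
    # This branch is unreachable at runtime (the class defines both attributes),
    # but it narrows the Optional[int] types so mypy accepts the return below.
    if (
        InstanceModelSketch.start_in_seconds is None
        or InstanceModelSketch.end_in_seconds is None
    ):
        raise RuntimeError("unexpected_condition")
    return InstanceModelSketch.start_in_seconds, InstanceModelSketch.end_in_seconds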
@@ -378,7 +375,9 @@ def process_instance_list(
    return Response(json.dumps(response_json), status=200, mimetype="application/json")


-def process_instance_delete(_process_group_id: str, _process_model_id: str, process_instance_id: int) -> flask.wrappers.Response:
+def process_instance_delete(
+    _process_group_id: str, _process_model_id: str, process_instance_id: int
+) -> flask.wrappers.Response:
    """Create_process_instance."""
    process_instance = ProcessInstanceModel.query.filter_by(
        id=process_instance_id
@@ -187,7 +187,9 @@ class ProcessModelService(FileSystemService):
                if item.is_dir() and item.name == process_group_id:
                    return self.__scan_process_group(item)

-        raise ProcessEntityNotFoundError("process_group_not_found", f"Process Group Id: {process_group_id}")
+        raise ProcessEntityNotFoundError(
+            "process_group_not_found", f"Process Group Id: {process_group_id}"
+        )

    def add_process_group(self, process_group: ProcessGroup) -> ProcessGroup:
        """Add_process_group."""
@@ -2,9 +2,9 @@
import io
import json
import time
-from typing import Any, Dict
+from typing import Any
+from typing import Dict
from typing import Optional
from typing import Union

import pytest
from flask.app import Flask
@@ -1006,7 +1006,9 @@ def create_spec_file(
    return file


-def create_process_group(client: FlaskClient, user: Any, process_group_id: str, display_name: str = "") -> str:
+def create_process_group(
+    client: FlaskClient, user: Any, process_group_id: str, display_name: str = ""
+) -> str:
    """Create_process_group."""
    process_group = ProcessGroup(
        id=process_group_id, display_name=display_name, display_order=0, admin=False