increase line length from 88 to 119 for black w/ burnettk
This commit is contained in:
parent
a6768a679d
commit
e305b22b5a
|
@ -18,8 +18,7 @@ repos:
|
|||
# --line-length because then we can avoid the fancy line wrapping in more instances and jason, kb, and elizabeth
|
||||
# kind of prefer long lines rather than cutely-formatted sets of lines.
|
||||
# TODO: enable when its safe to update the files
|
||||
# args: [--preview, --line-length, "110"]
|
||||
args: [--preview]
|
||||
args: [--preview, --line-length, "119"]
|
||||
|
||||
- id: check-added-large-files
|
||||
files: ^spiffworkflow-backend/
|
||||
|
|
|
@ -21,22 +21,14 @@ def main(process_instance_id: str) -> None:
|
|||
os.environ[flask_env_key] = "whatevs"
|
||||
app = create_app()
|
||||
with app.app_context():
|
||||
process_instance = ProcessInstanceModel.query.filter_by(
|
||||
id=process_instance_id
|
||||
).first()
|
||||
process_instance = ProcessInstanceModel.query.filter_by(id=process_instance_id).first()
|
||||
|
||||
file_path = f"/var/tmp/{process_instance_id}_bpmn_json.json"
|
||||
if not process_instance:
|
||||
raise Exception(
|
||||
f"Could not find a process instance with id: {process_instance_id}"
|
||||
)
|
||||
raise Exception(f"Could not find a process instance with id: {process_instance_id}")
|
||||
|
||||
with open(file_path, "w", encoding="utf-8") as f:
|
||||
f.write(
|
||||
json.dumps(
|
||||
ProcessInstanceProcessor._get_full_bpmn_json(process_instance)
|
||||
)
|
||||
)
|
||||
f.write(json.dumps(ProcessInstanceProcessor._get_full_bpmn_json(process_instance)))
|
||||
print(f"Saved to {file_path}")
|
||||
|
||||
|
||||
|
|
|
@ -28,8 +28,7 @@ def main():
|
|||
with app.app_context():
|
||||
process_model_identifier_ticket = "ticket"
|
||||
db.session.query(ProcessInstanceModel).filter(
|
||||
ProcessInstanceModel.process_model_identifier
|
||||
== process_model_identifier_ticket
|
||||
ProcessInstanceModel.process_model_identifier == process_model_identifier_ticket
|
||||
).delete()
|
||||
db.session.commit()
|
||||
|
||||
|
@ -60,9 +59,7 @@ def main():
|
|||
|
||||
header = next(reader)
|
||||
for column_name in columns_to_data_key_mappings:
|
||||
columns_to_header_index_mappings[column_name] = header.index(
|
||||
column_name
|
||||
)
|
||||
columns_to_header_index_mappings[column_name] = header.index(column_name)
|
||||
id_index = header.index("ID")
|
||||
priority_index = header.index("Priority")
|
||||
print(f"header: {header}")
|
||||
|
@ -87,9 +84,7 @@ def main():
|
|||
desired_data_key,
|
||||
) in columns_to_data_key_mappings.items():
|
||||
appropriate_index = columns_to_header_index_mappings[column_name]
|
||||
processor.bpmn_process_instance.data[desired_data_key] = row[
|
||||
appropriate_index
|
||||
]
|
||||
processor.bpmn_process_instance.data[desired_data_key] = row[appropriate_index]
|
||||
|
||||
print(f"datas: {processor.bpmn_process_instance.data}")
|
||||
if processor.bpmn_process_instance.data["month"] == "":
|
||||
|
|
|
@ -84,9 +84,7 @@ def main():
|
|||
) in columns_to_data_key_mappings.items():
|
||||
appropriate_index = columns_to_header_index_mappings[column_name]
|
||||
print(f"appropriate_index: {appropriate_index}")
|
||||
processor.bpmn_process_instance.data[desired_data_key] = row[
|
||||
appropriate_index
|
||||
]
|
||||
processor.bpmn_process_instance.data[desired_data_key] = row[appropriate_index]
|
||||
|
||||
# you at least need a month, or else this row in the csv is considered garbage
|
||||
month_value = processor.bpmn_process_instance.data["month"]
|
||||
|
|
|
@ -13,8 +13,7 @@ def main() -> None:
|
|||
for bpmn_errors in failing_process_models:
|
||||
print(bpmn_errors)
|
||||
if (
|
||||
os.environ.get("SPIFFWORKFLOW_BACKEND_FAIL_ON_INVALID_PROCESS_MODELS")
|
||||
!= "false"
|
||||
os.environ.get("SPIFFWORKFLOW_BACKEND_FAIL_ON_INVALID_PROCESS_MODELS") != "false"
|
||||
and len(failing_process_models) > 0
|
||||
):
|
||||
exit(1)
|
||||
|
|
|
@ -36,9 +36,7 @@ nox.options.sessions = (
|
|||
|
||||
def setup_database(session: Session) -> None:
|
||||
"""Run database migrations against the database."""
|
||||
session.env["FLASK_INSTANCE_PATH"] = os.path.join(
|
||||
os.getcwd(), "instance", "testing"
|
||||
)
|
||||
session.env["FLASK_INSTANCE_PATH"] = os.path.join(os.getcwd(), "instance", "testing")
|
||||
flask_env_key = "FLASK_SESSION_SECRET_KEY"
|
||||
session.env[flask_env_key] = "e7711a3ba96c46c68e084a86952de16f"
|
||||
session.env["FLASK_APP"] = "src/spiffworkflow_backend"
|
||||
|
@ -72,9 +70,7 @@ def activate_virtualenv_in_precommit_hooks(session: Session) -> None:
|
|||
|
||||
text = hook.read_text()
|
||||
bindir = repr(session.bin)[1:-1] # strip quotes
|
||||
if not (
|
||||
Path("A") == Path("a") and bindir.lower() in text.lower() or bindir in text
|
||||
):
|
||||
if not (Path("A") == Path("a") and bindir.lower() in text.lower() or bindir in text):
|
||||
continue
|
||||
|
||||
lines = text.splitlines()
|
||||
|
|
|
@ -63,16 +63,12 @@ class MyJSONEncoder(DefaultJSONProvider):
|
|||
return super().dumps(obj, **kwargs)
|
||||
|
||||
|
||||
def start_scheduler(
|
||||
app: flask.app.Flask, scheduler_class: BaseScheduler = BackgroundScheduler
|
||||
) -> None:
|
||||
def start_scheduler(app: flask.app.Flask, scheduler_class: BaseScheduler = BackgroundScheduler) -> None:
|
||||
"""Start_scheduler."""
|
||||
scheduler = scheduler_class()
|
||||
|
||||
# TODO: polling intervals for different jobs
|
||||
polling_interval_in_seconds = app.config[
|
||||
"SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_POLLING_INTERVAL_IN_SECONDS"
|
||||
]
|
||||
polling_interval_in_seconds = app.config["SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_POLLING_INTERVAL_IN_SECONDS"]
|
||||
# TODO: add job to release locks to simplify other queries
|
||||
# TODO: add job to delete completed entires
|
||||
# TODO: add job to run old/low priority instances so they do not get drowned out
|
||||
|
@ -100,10 +96,7 @@ def should_start_scheduler(app: flask.app.Flask) -> bool:
|
|||
return False
|
||||
|
||||
# do not start the scheduler twice in flask debug mode but support code reloading
|
||||
if (
|
||||
app.config["ENV_IDENTIFIER"] != "local_development"
|
||||
or os.environ.get("WERKZEUG_RUN_MAIN") != "true"
|
||||
):
|
||||
if app.config["ENV_IDENTIFIER"] != "local_development" or os.environ.get("WERKZEUG_RUN_MAIN") != "true":
|
||||
return False
|
||||
|
||||
return True
|
||||
|
@ -126,9 +119,7 @@ def create_app() -> flask.app.Flask:
|
|||
# variable, it will be one thing when we run flask db upgrade in the
|
||||
# noxfile and another thing when the tests actually run.
|
||||
# instance_path is described more at https://flask.palletsprojects.com/en/2.1.x/config/
|
||||
connexion_app = connexion.FlaskApp(
|
||||
__name__, server_args={"instance_path": os.environ.get("FLASK_INSTANCE_PATH")}
|
||||
)
|
||||
connexion_app = connexion.FlaskApp(__name__, server_args={"instance_path": os.environ.get("FLASK_INSTANCE_PATH")})
|
||||
app = connexion_app.app
|
||||
app.config["CONNEXION_APP"] = connexion_app
|
||||
app.config["SESSION_TYPE"] = "filesystem"
|
||||
|
@ -145,8 +136,7 @@ def create_app() -> flask.app.Flask:
|
|||
# we will add an Access-Control-Max-Age header to the response to tell the browser it doesn't
|
||||
# need to continually keep asking for the same path.
|
||||
origins_re = [
|
||||
r"^https?:\/\/%s(.*)" % o.replace(".", r"\.")
|
||||
for o in app.config["SPIFFWORKFLOW_BACKEND_CORS_ALLOW_ORIGINS"]
|
||||
r"^https?:\/\/%s(.*)" % o.replace(".", r"\.") for o in app.config["SPIFFWORKFLOW_BACKEND_CORS_ALLOW_ORIGINS"]
|
||||
]
|
||||
CORS(app, origins=origins_re, max_age=3600, supports_credentials=True)
|
||||
|
||||
|
@ -195,13 +185,9 @@ def get_hacked_up_app_for_script() -> flask.app.Flask:
|
|||
os.environ[flask_env_key] = "whatevs"
|
||||
if "SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR" not in os.environ:
|
||||
home = os.environ["HOME"]
|
||||
full_process_model_path = (
|
||||
f"{home}/projects/github/sartography/sample-process-models"
|
||||
)
|
||||
full_process_model_path = f"{home}/projects/github/sartography/sample-process-models"
|
||||
if os.path.isdir(full_process_model_path):
|
||||
os.environ["SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"] = (
|
||||
full_process_model_path
|
||||
)
|
||||
os.environ["SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"] = full_process_model_path
|
||||
else:
|
||||
raise Exception(f"Could not find {full_process_model_path}")
|
||||
app = create_app()
|
||||
|
@ -245,21 +231,13 @@ def configure_sentry(app: flask.app.Flask) -> None:
|
|||
return None
|
||||
return event
|
||||
|
||||
sentry_errors_sample_rate = app.config.get(
|
||||
"SPIFFWORKFLOW_BACKEND_SENTRY_ERRORS_SAMPLE_RATE"
|
||||
)
|
||||
sentry_errors_sample_rate = app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_ERRORS_SAMPLE_RATE")
|
||||
if sentry_errors_sample_rate is None:
|
||||
raise Exception(
|
||||
"SPIFFWORKFLOW_BACKEND_SENTRY_ERRORS_SAMPLE_RATE is not set somehow"
|
||||
)
|
||||
raise Exception("SPIFFWORKFLOW_BACKEND_SENTRY_ERRORS_SAMPLE_RATE is not set somehow")
|
||||
|
||||
sentry_traces_sample_rate = app.config.get(
|
||||
"SPIFFWORKFLOW_BACKEND_SENTRY_TRACES_SAMPLE_RATE"
|
||||
)
|
||||
sentry_traces_sample_rate = app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_TRACES_SAMPLE_RATE")
|
||||
if sentry_traces_sample_rate is None:
|
||||
raise Exception(
|
||||
"SPIFFWORKFLOW_BACKEND_SENTRY_TRACES_SAMPLE_RATE is not set somehow"
|
||||
)
|
||||
raise Exception("SPIFFWORKFLOW_BACKEND_SENTRY_TRACES_SAMPLE_RATE is not set somehow")
|
||||
|
||||
sentry_configs = {
|
||||
"dsn": app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_DSN"),
|
||||
|
@ -284,8 +262,6 @@ def configure_sentry(app: flask.app.Flask) -> None:
|
|||
# but also we commented out profiling because it was causing segfaults (i guess it is marked experimental)
|
||||
profiles_sample_rate = 0 if sys.platform.startswith("win") else 1
|
||||
if profiles_sample_rate > 0:
|
||||
sentry_configs["_experiments"] = {
|
||||
"profiles_sample_rate": profiles_sample_rate
|
||||
}
|
||||
sentry_configs["_experiments"] = {"profiles_sample_rate": profiles_sample_rate}
|
||||
|
||||
sentry_sdk.init(**sentry_configs)
|
||||
|
|
|
@ -30,13 +30,9 @@ def setup_database_uri(app: Flask) -> None:
|
|||
db_pswd = app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD")
|
||||
if db_pswd is None:
|
||||
db_pswd = ""
|
||||
app.config["SQLALCHEMY_DATABASE_URI"] = (
|
||||
f"mysql+mysqlconnector://root:{db_pswd}@localhost/{database_name}"
|
||||
)
|
||||
app.config["SQLALCHEMY_DATABASE_URI"] = f"mysql+mysqlconnector://root:{db_pswd}@localhost/{database_name}"
|
||||
else:
|
||||
app.config["SQLALCHEMY_DATABASE_URI"] = app.config.get(
|
||||
"SPIFFWORKFLOW_BACKEND_DATABASE_URI"
|
||||
)
|
||||
app.config["SQLALCHEMY_DATABASE_URI"] = app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_URI")
|
||||
|
||||
|
||||
def load_config_file(app: Flask, env_config_module: str) -> None:
|
||||
|
@ -45,30 +41,20 @@ def load_config_file(app: Flask, env_config_module: str) -> None:
|
|||
app.config.from_object(env_config_module)
|
||||
print(f"loaded config: {env_config_module}")
|
||||
except ImportStringError as exception:
|
||||
if (
|
||||
os.environ.get("SPIFFWORKFLOW_BACKEND_TERRAFORM_DEPLOYED_ENVIRONMENT")
|
||||
!= "true"
|
||||
):
|
||||
raise ModuleNotFoundError(
|
||||
f"Cannot find config module: {env_config_module}"
|
||||
) from exception
|
||||
if os.environ.get("SPIFFWORKFLOW_BACKEND_TERRAFORM_DEPLOYED_ENVIRONMENT") != "true":
|
||||
raise ModuleNotFoundError(f"Cannot find config module: {env_config_module}") from exception
|
||||
|
||||
|
||||
def _set_up_tenant_specific_fields_as_list_of_strings(app: Flask) -> None:
|
||||
tenant_specific_fields = app.config.get(
|
||||
"SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS"
|
||||
)
|
||||
tenant_specific_fields = app.config.get("SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS")
|
||||
|
||||
if tenant_specific_fields is None or tenant_specific_fields == "":
|
||||
app.config["SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS"] = []
|
||||
else:
|
||||
app.config["SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS"] = (
|
||||
tenant_specific_fields.split(",")
|
||||
)
|
||||
app.config["SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS"] = tenant_specific_fields.split(",")
|
||||
if len(app.config["SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS"]) > 3:
|
||||
raise ConfigurationError(
|
||||
"SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS can have a"
|
||||
" maximum of 3 fields"
|
||||
"SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS can have a maximum of 3 fields"
|
||||
)
|
||||
|
||||
|
||||
|
@ -80,9 +66,7 @@ def setup_config(app: Flask) -> None:
|
|||
except OSError:
|
||||
pass
|
||||
|
||||
app.config["ENV_IDENTIFIER"] = os.environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_ENV", "local_development"
|
||||
)
|
||||
app.config["ENV_IDENTIFIER"] = os.environ.get("SPIFFWORKFLOW_BACKEND_ENV", "local_development")
|
||||
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
|
||||
load_config_file(app, "spiffworkflow_backend.config.default")
|
||||
|
||||
|
@ -99,10 +83,7 @@ def setup_config(app: Flask) -> None:
|
|||
# This allows config/testing.py or instance/config.py to override the default config
|
||||
if "ENV_IDENTIFIER" in app.config and app.config["ENV_IDENTIFIER"] == "testing":
|
||||
app.config.from_pyfile("config/testing.py", silent=True)
|
||||
elif (
|
||||
"ENV_IDENTIFIER" in app.config
|
||||
and app.config["ENV_IDENTIFIER"] == "unit_testing"
|
||||
):
|
||||
elif "ENV_IDENTIFIER" in app.config and app.config["ENV_IDENTIFIER"] == "unit_testing":
|
||||
app.config.from_pyfile("config/unit_testing.py", silent=True)
|
||||
else:
|
||||
app.config.from_pyfile(f"{app.instance_path}/config.py", silent=True)
|
||||
|
@ -125,15 +106,10 @@ def setup_config(app: Flask) -> None:
|
|||
app.config.from_pyfile(os.path.join("config", "secrets.py"), silent=True)
|
||||
|
||||
if app.config["SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"] is None:
|
||||
raise ConfigurationError(
|
||||
"SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR config must be set"
|
||||
)
|
||||
raise ConfigurationError("SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR config must be set")
|
||||
|
||||
if app.config["FLASK_SESSION_SECRET_KEY"] is None:
|
||||
raise KeyError(
|
||||
"Cannot find the secret_key from the environment. Please set"
|
||||
" FLASK_SESSION_SECRET_KEY"
|
||||
)
|
||||
raise KeyError("Cannot find the secret_key from the environment. Please set FLASK_SESSION_SECRET_KEY")
|
||||
|
||||
app.secret_key = os.environ.get("FLASK_SESSION_SECRET_KEY")
|
||||
|
||||
|
|
|
@ -8,9 +8,7 @@ from os import environ
|
|||
|
||||
FLASK_SESSION_SECRET_KEY = environ.get("FLASK_SESSION_SECRET_KEY")
|
||||
|
||||
SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR = environ.get("SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR")
|
||||
cors_allow_all = "*"
|
||||
SPIFFWORKFLOW_BACKEND_CORS_ALLOW_ORIGINS = re.split(
|
||||
r",\s*",
|
||||
|
@ -18,8 +16,7 @@ SPIFFWORKFLOW_BACKEND_CORS_ALLOW_ORIGINS = re.split(
|
|||
)
|
||||
|
||||
SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER = (
|
||||
environ.get("SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER", default="false")
|
||||
== "true"
|
||||
environ.get("SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER", default="false") == "true"
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_POLLING_INTERVAL_IN_SECONDS = int(
|
||||
environ.get(
|
||||
|
@ -30,9 +27,7 @@ SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_POLLING_INTERVAL_IN_SECONDS = int(
|
|||
SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND", default="http://localhost:7001"
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_URL = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_URL", default="http://localhost:7000"
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_URL = environ.get("SPIFFWORKFLOW_BACKEND_URL", default="http://localhost:7000")
|
||||
# service task connector proxy
|
||||
SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL", default="http://localhost:7004"
|
||||
|
@ -68,18 +63,12 @@ SPIFFWORKFLOW_BACKEND_ENCRYPTION_LIB = environ.get(
|
|||
default="no_op_cipher",
|
||||
)
|
||||
|
||||
SPIFFWORKFLOW_BACKEND_LOG_TO_FILE = (
|
||||
environ.get("SPIFFWORKFLOW_BACKEND_LOG_TO_FILE", default="false") == "true"
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_LOG_TO_FILE = environ.get("SPIFFWORKFLOW_BACKEND_LOG_TO_FILE", default="false") == "true"
|
||||
|
||||
SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get("SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME")
|
||||
|
||||
# Sentry Configuration
|
||||
SPIFFWORKFLOW_BACKEND_SENTRY_DSN = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_SENTRY_DSN", default=""
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_SENTRY_DSN = environ.get("SPIFFWORKFLOW_BACKEND_SENTRY_DSN", default="")
|
||||
SPIFFWORKFLOW_BACKEND_SENTRY_ERRORS_SAMPLE_RATE = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_SENTRY_ERRORS_SAMPLE_RATE", default="1"
|
||||
) # send all errors
|
||||
|
@ -89,43 +78,28 @@ SPIFFWORKFLOW_BACKEND_SENTRY_TRACES_SAMPLE_RATE = environ.get(
|
|||
SPIFFWORKFLOW_BACKEND_SENTRY_ORGANIZATION_SLUG = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_SENTRY_ORGANIZATION_SLUG", default=None
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_SENTRY_PROJECT_SLUG = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_SENTRY_PROJECT_SLUG", default=None
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_SENTRY_PROJECT_SLUG = environ.get("SPIFFWORKFLOW_BACKEND_SENTRY_PROJECT_SLUG", default=None)
|
||||
SPIFFWORKFLOW_BACKEND_SENTRY_PROFILING_ENABLED = (
|
||||
environ.get("SPIFFWORKFLOW_BACKEND_SENTRY_PROFILING_ENABLED", default="false")
|
||||
== "true"
|
||||
environ.get("SPIFFWORKFLOW_BACKEND_SENTRY_PROFILING_ENABLED", default="false") == "true"
|
||||
)
|
||||
|
||||
SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_LOG_LEVEL", default="info"
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get("SPIFFWORKFLOW_BACKEND_LOG_LEVEL", default="info")
|
||||
|
||||
# When a user clicks on the `Publish` button, this is the default branch this server merges into.
|
||||
# I.e., dev server could have `staging` here. Staging server might have `production` here.
|
||||
SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH"
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH = environ.get("SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH")
|
||||
# This is the branch that the app automatically commits to every time the user clicks the save button
|
||||
# or otherwise changes a process model.
|
||||
# If publishing is enabled, the contents of this "staging area" / "scratch pad" / WIP spot will be used
|
||||
# as the relevant contents for process model that the user wants to publish.
|
||||
SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH"
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL"
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH = environ.get("SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH")
|
||||
SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL = environ.get("SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL")
|
||||
SPIFFWORKFLOW_BACKEND_GIT_COMMIT_ON_SAVE = (
|
||||
environ.get("SPIFFWORKFLOW_BACKEND_GIT_COMMIT_ON_SAVE", default="false") == "true"
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_GIT_USERNAME = environ.get("SPIFFWORKFLOW_BACKEND_GIT_USERNAME")
|
||||
SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL"
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_GITHUB_WEBHOOK_SECRET = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_GITHUB_WEBHOOK_SECRET", default=None
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL = environ.get("SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL")
|
||||
SPIFFWORKFLOW_BACKEND_GITHUB_WEBHOOK_SECRET = environ.get("SPIFFWORKFLOW_BACKEND_GITHUB_WEBHOOK_SECRET", default=None)
|
||||
SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY_PATH = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY_PATH", default=None
|
||||
)
|
||||
|
@ -135,23 +109,17 @@ SPIFFWORKFLOW_BACKEND_DATABASE_TYPE = environ.get(
|
|||
"SPIFFWORKFLOW_BACKEND_DATABASE_TYPE", default="mysql"
|
||||
) # can also be sqlite, postgres
|
||||
# Overide above with specific sqlalchymy connection string.
|
||||
SPIFFWORKFLOW_BACKEND_DATABASE_URI = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_DATABASE_URI", default=None
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_DATABASE_URI = environ.get("SPIFFWORKFLOW_BACKEND_DATABASE_URI", default=None)
|
||||
SPIFFWORKFLOW_BACKEND_SYSTEM_NOTIFICATION_PROCESS_MODEL_MESSAGE_ID = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_SYSTEM_NOTIFICATION_PROCESS_MODEL_MESSAGE_ID",
|
||||
default="Message_SystemMessageNotification",
|
||||
)
|
||||
|
||||
SPIFFWORKFLOW_BACKEND_ALLOW_CONFISCATING_LOCK_AFTER_SECONDS = int(
|
||||
environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_ALLOW_CONFISCATING_LOCK_AFTER_SECONDS", default="600"
|
||||
)
|
||||
environ.get("SPIFFWORKFLOW_BACKEND_ALLOW_CONFISCATING_LOCK_AFTER_SECONDS", default="600")
|
||||
)
|
||||
|
||||
SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP", default="everybody"
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP = environ.get("SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP", default="everybody")
|
||||
|
||||
SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_BACKGROUND = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_BACKGROUND", default="greedy"
|
||||
|
@ -162,6 +130,4 @@ SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_WEB = environ.get(
|
|||
)
|
||||
|
||||
# this is only used in CI. use SPIFFWORKFLOW_BACKEND_DATABASE_URI instead for real configuration
|
||||
SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD", default=None
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD = environ.get("SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD", default=None)
|
||||
|
|
|
@ -10,6 +10,5 @@ SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
|
|||
)
|
||||
|
||||
SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER = (
|
||||
environ.get("SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER", default="false")
|
||||
== "true"
|
||||
environ.get("SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER", default="false") == "true"
|
||||
)
|
||||
|
|
|
@ -5,19 +5,14 @@ SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
|
|||
"SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", default="local_development.yml"
|
||||
)
|
||||
|
||||
SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_LOG_LEVEL", default="debug"
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get("SPIFFWORKFLOW_BACKEND_LOG_LEVEL", default="debug")
|
||||
|
||||
SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER = (
|
||||
environ.get("SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER", default="false")
|
||||
== "true"
|
||||
environ.get("SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER", default="false") == "true"
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL",
|
||||
default="https://github.com/sartography/sample-process-models.git",
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_GIT_USERNAME = "sartography-automated-committer"
|
||||
SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL = (
|
||||
f"{SPIFFWORKFLOW_BACKEND_GIT_USERNAME}@users.noreply.github.com"
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL = f"{SPIFFWORKFLOW_BACKEND_GIT_USERNAME}@users.noreply.github.com"
|
||||
|
|
|
@ -5,10 +5,6 @@ SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
|
|||
"SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", default="qa1.yml"
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND = "https://qa2.spiffworkflow.org"
|
||||
SPIFFWORKFLOW_BACKEND_OPEN_ID_SERVER_URL = (
|
||||
"https://qa2.spiffworkflow.org/keycloak/realms/spiffworkflow"
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_OPEN_ID_SERVER_URL = "https://qa2.spiffworkflow.org/keycloak/realms/spiffworkflow"
|
||||
SPIFFWORKFLOW_BACKEND_URL = "https://qa2.spiffworkflow.org/api"
|
||||
SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL = (
|
||||
"https://qa2.spiffworkflow.org/connector-proxy"
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL = "https://qa2.spiffworkflow.org/connector-proxy"
|
||||
|
|
|
@ -3,12 +3,9 @@ from os import environ
|
|||
|
||||
environment_identifier_for_this_config_file_only = environ["SPIFFWORKFLOW_BACKEND_ENV"]
|
||||
SPIFFWORKFLOW_BACKEND_OPEN_ID_SERVER_URL = (
|
||||
f"https://keycloak.{environment_identifier_for_this_config_file_only}"
|
||||
".spiffworkflow.org/realms/sartography"
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH", default="main"
|
||||
f"https://keycloak.{environment_identifier_for_this_config_file_only}.spiffworkflow.org/realms/sartography"
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH = environ.get("SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH", default="main")
|
||||
SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL",
|
||||
default="https://github.com/sartography/sartography-process-models.git",
|
||||
|
|
|
@ -1,9 +1,7 @@
|
|||
"""Staging."""
|
||||
from os import environ
|
||||
|
||||
SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH", default="staging"
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH = environ.get("SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH", default="staging")
|
||||
SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH", default="main"
|
||||
)
|
||||
|
|
|
@ -6,36 +6,29 @@ environment_identifier_for_this_config_file_only = environ["SPIFFWORKFLOW_BACKEN
|
|||
|
||||
SPIFFWORKFLOW_BACKEND_GIT_COMMIT_ON_SAVE = True
|
||||
SPIFFWORKFLOW_BACKEND_GIT_USERNAME = "sartography-automated-committer"
|
||||
SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL = (
|
||||
f"{SPIFFWORKFLOW_BACKEND_GIT_USERNAME}@users.noreply.github.com"
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL = f"{SPIFFWORKFLOW_BACKEND_GIT_USERNAME}@users.noreply.github.com"
|
||||
SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME",
|
||||
default="terraform_deployed_environment.yml",
|
||||
)
|
||||
|
||||
SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER = (
|
||||
environ.get("SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER", default="false")
|
||||
== "true"
|
||||
environ.get("SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER", default="false") == "true"
|
||||
)
|
||||
|
||||
SPIFFWORKFLOW_BACKEND_OPEN_ID_SERVER_URL = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_OPEN_ID_SERVER_URL",
|
||||
default=(
|
||||
f"https://keycloak.{environment_identifier_for_this_config_file_only}"
|
||||
".spiffworkflow.org/realms/spiffworkflow"
|
||||
f"https://keycloak.{environment_identifier_for_this_config_file_only}.spiffworkflow.org/realms/spiffworkflow"
|
||||
),
|
||||
)
|
||||
|
||||
SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND = (
|
||||
f"https://{environment_identifier_for_this_config_file_only}.spiffworkflow.org"
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_URL = (
|
||||
f"https://api.{environment_identifier_for_this_config_file_only}.spiffworkflow.org"
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_URL = f"https://api.{environment_identifier_for_this_config_file_only}.spiffworkflow.org"
|
||||
SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL = (
|
||||
f"https://connector-proxy.{environment_identifier_for_this_config_file_only}"
|
||||
".spiffworkflow.org"
|
||||
f"https://connector-proxy.{environment_identifier_for_this_config_file_only}.spiffworkflow.org"
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL",
|
||||
|
|
|
@ -4,17 +4,13 @@ from os import environ
|
|||
|
||||
TESTING = True
|
||||
SECRET_KEY = "the_secret_key"
|
||||
SPIFFWORKFLOW_BACKEND_LOG_TO_FILE = (
|
||||
environ.get("SPIFFWORKFLOW_BACKEND_LOG_TO_FILE", default="true") == "true"
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_LOG_TO_FILE = environ.get("SPIFFWORKFLOW_BACKEND_LOG_TO_FILE", default="true") == "true"
|
||||
|
||||
SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", default="unit_testing.yml"
|
||||
)
|
||||
|
||||
SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_LOG_LEVEL", default="debug"
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get("SPIFFWORKFLOW_BACKEND_LOG_LEVEL", default="debug")
|
||||
SPIFFWORKFLOW_BACKEND_GIT_COMMIT_ON_SAVE = False
|
||||
|
||||
# NOTE: set this here since nox shoves tests and src code to
|
||||
|
|
|
@ -202,20 +202,13 @@ def handle_exception(exception: Exception) -> flask.wrappers.Response:
|
|||
|
||||
if isinstance(exception, ApiError):
|
||||
current_app.logger.info(
|
||||
f"Sending ApiError exception to sentry: {exception} with error code"
|
||||
f" {exception.error_code}"
|
||||
f"Sending ApiError exception to sentry: {exception} with error code {exception.error_code}"
|
||||
)
|
||||
|
||||
organization_slug = current_app.config.get(
|
||||
"SPIFFWORKFLOW_BACKEND_SENTRY_ORGANIZATION_SLUG"
|
||||
)
|
||||
project_slug = current_app.config.get(
|
||||
"SPIFFWORKFLOW_BACKEND_SENTRY_PROJECT_SLUG"
|
||||
)
|
||||
organization_slug = current_app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_ORGANIZATION_SLUG")
|
||||
project_slug = current_app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_PROJECT_SLUG")
|
||||
if organization_slug and project_slug:
|
||||
sentry_link = (
|
||||
f"https://sentry.io/{organization_slug}/{project_slug}/events/{id}"
|
||||
)
|
||||
sentry_link = f"https://sentry.io/{organization_slug}/{project_slug}/events/{id}"
|
||||
|
||||
# !!!NOTE!!!: do this after sentry stuff since calling logger.exception
|
||||
# seems to break the sentry sdk context where we no longer get back
|
||||
|
|
|
@ -18,9 +18,7 @@ class BpmnProcessModel(SpiffworkflowBaseDBModel):
|
|||
id: int = db.Column(db.Integer, primary_key=True)
|
||||
guid: str | None = db.Column(db.String(36), nullable=True, unique=True, index=True)
|
||||
|
||||
parent_process_id: int | None = db.Column(
|
||||
ForeignKey("bpmn_process.id"), nullable=True
|
||||
)
|
||||
parent_process_id: int | None = db.Column(ForeignKey("bpmn_process.id"), nullable=True)
|
||||
|
||||
properties_json: dict = db.Column(db.JSON, nullable=False)
|
||||
json_data_hash: str = db.Column(db.String(255), nullable=False, index=True)
|
||||
|
|
|
@ -39,16 +39,12 @@ class SpiffworkflowBaseDBModel(db.Model): # type: ignore
|
|||
children.append(subclass)
|
||||
return result
|
||||
|
||||
def validate_enum_field(
|
||||
self, key: str, value: Any, enum_variable: enum.EnumMeta
|
||||
) -> Any:
|
||||
def validate_enum_field(self, key: str, value: Any, enum_variable: enum.EnumMeta) -> Any:
|
||||
"""Validate_enum_field."""
|
||||
try:
|
||||
m_type = getattr(enum_variable, value, None)
|
||||
except Exception as e:
|
||||
raise ValueError(
|
||||
f"{self.__class__.__name__}: invalid {key}: {value}"
|
||||
) from e
|
||||
raise ValueError(f"{self.__class__.__name__}: invalid {key}: {value}") from e
|
||||
|
||||
if m_type is None:
|
||||
raise ValueError(f"{self.__class__.__name__}: invalid {key}: {value}")
|
||||
|
|
|
@ -126,6 +126,4 @@ class FileSchema(Schema):
|
|||
"process_model_id",
|
||||
]
|
||||
unknown = INCLUDE
|
||||
references = marshmallow.fields.List(
|
||||
marshmallow.fields.Nested("SpecReferenceSchema")
|
||||
)
|
||||
references = marshmallow.fields.List(marshmallow.fields.Nested("SpecReferenceSchema"))
|
||||
|
|
|
@ -30,9 +30,7 @@ class GroupModel(SpiffworkflowBaseDBModel):
|
|||
identifier = db.Column(db.String(255))
|
||||
|
||||
user_group_assignments = relationship("UserGroupAssignmentModel", cascade="delete")
|
||||
user_group_assignments_waiting = relationship( # type: ignore
|
||||
"UserGroupAssignmentWaitingModel", cascade="delete"
|
||||
)
|
||||
user_group_assignments_waiting = relationship("UserGroupAssignmentWaitingModel", cascade="delete") # type: ignore
|
||||
users = relationship( # type: ignore
|
||||
"UserModel",
|
||||
viewonly=True,
|
||||
|
|
|
@ -28,15 +28,11 @@ class HumanTaskModel(SpiffworkflowBaseDBModel):
|
|||
__tablename__ = "human_task"
|
||||
|
||||
id: int = db.Column(db.Integer, primary_key=True)
|
||||
process_instance_id: int = db.Column(
|
||||
ForeignKey(ProcessInstanceModel.id), nullable=False # type: ignore
|
||||
)
|
||||
process_instance_id: int = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=False) # type: ignore
|
||||
lane_assignment_id: int | None = db.Column(ForeignKey(GroupModel.id))
|
||||
completed_by_user_id: int = db.Column(ForeignKey(UserModel.id), nullable=True) # type: ignore
|
||||
|
||||
completed_by_user = relationship(
|
||||
"UserModel", foreign_keys=[completed_by_user_id], viewonly=True
|
||||
)
|
||||
completed_by_user = relationship("UserModel", foreign_keys=[completed_by_user_id], viewonly=True)
|
||||
|
||||
actual_owner_id: int = db.Column(ForeignKey(UserModel.id)) # type: ignore
|
||||
# actual_owner: RelationshipProperty[UserModel] = relationship(UserModel)
|
||||
|
|
|
@ -27,9 +27,7 @@ class HumanTaskUserModel(SpiffworkflowBaseDBModel):
|
|||
)
|
||||
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
human_task_id = db.Column(
|
||||
ForeignKey(HumanTaskModel.id), nullable=False, index=True # type: ignore
|
||||
)
|
||||
human_task_id = db.Column(ForeignKey(HumanTaskModel.id), nullable=False, index=True) # type: ignore
|
||||
user_id = db.Column(ForeignKey(UserModel.id), nullable=False, index=True) # type: ignore
|
||||
|
||||
human_task = relationship(HumanTaskModel)
|
||||
|
|
|
@ -34,13 +34,9 @@ class JsonDataModel(SpiffworkflowBaseDBModel):
|
|||
|
||||
@classmethod
|
||||
def find_object_by_hash(cls, hash: str) -> JsonDataModel:
|
||||
json_data_model: JsonDataModel | None = JsonDataModel.query.filter_by(
|
||||
hash=hash
|
||||
).first()
|
||||
json_data_model: JsonDataModel | None = JsonDataModel.query.filter_by(hash=hash).first()
|
||||
if json_data_model is None:
|
||||
raise JsonDataModelNotFoundError(
|
||||
f"Could not find a json data model entry with hash: {hash}"
|
||||
)
|
||||
raise JsonDataModelNotFoundError(f"Could not find a json data model entry with hash: {hash}")
|
||||
return json_data_model
|
||||
|
||||
@classmethod
|
||||
|
|
|
@ -63,9 +63,7 @@ class MessageInstanceModel(SpiffworkflowBaseDBModel):
|
|||
failure_cause: str = db.Column(db.Text())
|
||||
updated_at_in_seconds: int = db.Column(db.Integer)
|
||||
created_at_in_seconds: int = db.Column(db.Integer)
|
||||
correlation_rules = relationship(
|
||||
"MessageInstanceCorrelationRuleModel", back_populates="message_instance"
|
||||
)
|
||||
correlation_rules = relationship("MessageInstanceCorrelationRuleModel", back_populates="message_instance")
|
||||
|
||||
@validates("message_type")
|
||||
def validate_message_type(self, key: str, value: Any) -> Any:
|
||||
|
@ -94,10 +92,7 @@ class MessageInstanceModel(SpiffworkflowBaseDBModel):
|
|||
return False
|
||||
if not self.is_receive():
|
||||
return False
|
||||
if (
|
||||
isinstance(self.correlation_keys, dict)
|
||||
and self.correlation_keys == other.correlation_keys
|
||||
):
|
||||
if isinstance(self.correlation_keys, dict) and self.correlation_keys == other.correlation_keys:
|
||||
# We know we have a match, and we can just return if we don't have to figure out the key
|
||||
return True
|
||||
|
||||
|
@ -107,9 +102,7 @@ class MessageInstanceModel(SpiffworkflowBaseDBModel):
|
|||
|
||||
# Loop over the receives' correlation keys - if any of the keys fully match, then we match.
|
||||
for expected_values in self.correlation_keys.values():
|
||||
if self.payload_matches_expected_values(
|
||||
other.payload, expected_values, expression_engine
|
||||
):
|
||||
if self.payload_matches_expected_values(other.payload, expected_values, expression_engine):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
@ -128,23 +121,17 @@ class MessageInstanceModel(SpiffworkflowBaseDBModel):
|
|||
"""Compares the payload of a 'send' message against a single correlation key's expected values."""
|
||||
for correlation_key in self.correlation_rules:
|
||||
expected_value = expected_values.get(correlation_key.name, None)
|
||||
if (
|
||||
expected_value is None
|
||||
): # This key is not required for this instance to match.
|
||||
if expected_value is None: # This key is not required for this instance to match.
|
||||
continue
|
||||
try:
|
||||
result = expression_engine._evaluate(
|
||||
correlation_key.retrieval_expression, payload
|
||||
)
|
||||
result = expression_engine._evaluate(correlation_key.retrieval_expression, payload)
|
||||
except Exception as e:
|
||||
# the failure of a payload evaluation may not mean that matches for these
|
||||
# message instances can't happen with other messages. So don't error up.
|
||||
# fixme: Perhaps log some sort of error.
|
||||
current_app.logger.warning(
|
||||
"Error evaluating correlation key when comparing send and receive"
|
||||
" messages."
|
||||
+ f"Expression {correlation_key.retrieval_expression} failed with"
|
||||
" the error "
|
||||
"Error evaluating correlation key when comparing send and receive messages."
|
||||
+ f"Expression {correlation_key.retrieval_expression} failed with the error "
|
||||
+ str(e)
|
||||
)
|
||||
return False
|
||||
|
@ -168,7 +155,4 @@ def ensure_failure_cause_is_set_if_message_instance_failed(
|
|||
for instance in session.new:
|
||||
if isinstance(instance, MessageInstanceModel):
|
||||
if instance.status == "failed" and instance.failure_cause is None:
|
||||
raise ValueError(
|
||||
f"{instance.__class__.__name__}: failure_cause must be set if"
|
||||
" status is failed"
|
||||
)
|
||||
raise ValueError(f"{instance.__class__.__name__}: failure_cause must be set if status is failed")
|
||||
|
|
|
@ -29,13 +29,9 @@ class MessageInstanceCorrelationRuleModel(SpiffworkflowBaseDBModel):
|
|||
)
|
||||
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
message_instance_id = db.Column(
|
||||
ForeignKey(MessageInstanceModel.id), nullable=False, index=True # type: ignore
|
||||
)
|
||||
message_instance_id = db.Column(ForeignKey(MessageInstanceModel.id), nullable=False, index=True) # type: ignore
|
||||
name: str = db.Column(db.String(50), nullable=False)
|
||||
retrieval_expression: str = db.Column(db.String(255))
|
||||
updated_at_in_seconds: int = db.Column(db.Integer)
|
||||
created_at_in_seconds: int = db.Column(db.Integer)
|
||||
message_instance = relationship(
|
||||
"MessageInstanceModel", back_populates="correlation_rules"
|
||||
)
|
||||
message_instance = relationship("MessageInstanceModel", back_populates="correlation_rules")
|
||||
|
|
|
@ -47,9 +47,7 @@ class PermissionAssignmentModel(SpiffworkflowBaseDBModel):
|
|||
)
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
principal_id = db.Column(ForeignKey(PrincipalModel.id), nullable=False)
|
||||
permission_target_id = db.Column(
|
||||
ForeignKey(PermissionTargetModel.id), nullable=False # type: ignore
|
||||
)
|
||||
permission_target_id = db.Column(ForeignKey(PermissionTargetModel.id), nullable=False) # type: ignore
|
||||
grant_type = db.Column(db.String(50), nullable=False)
|
||||
permission = db.Column(db.String(50), nullable=False)
|
||||
|
||||
|
|
|
@ -35,7 +35,5 @@ class PermissionTargetModel(SpiffworkflowBaseDBModel):
|
|||
def validate_uri(self, key: str, value: str) -> str:
|
||||
"""Validate_uri."""
|
||||
if re.search(r"%.", value):
|
||||
raise InvalidPermissionTargetUriError(
|
||||
f"Wildcard must appear at end: {value}"
|
||||
)
|
||||
raise InvalidPermissionTargetUriError(f"Wildcard must appear at end: {value}")
|
||||
return value
|
||||
|
|
|
@ -26,9 +26,7 @@ class ProcessGroup:
|
|||
description: str | None = None
|
||||
display_order: int | None = 0
|
||||
admin: bool | None = False
|
||||
process_models: list[ProcessModelInfo] = field(
|
||||
default_factory=list[ProcessModelInfo]
|
||||
)
|
||||
process_models: list[ProcessModelInfo] = field(default_factory=list[ProcessModelInfo])
|
||||
process_groups: list[ProcessGroup] = field(default_factory=list["ProcessGroup"])
|
||||
parent_groups: list[ProcessGroupLite] | None = None
|
||||
|
||||
|
@ -74,17 +72,13 @@ class ProcessGroupSchema(Schema):
|
|||
]
|
||||
|
||||
process_models = marshmallow.fields.List(
|
||||
marshmallow.fields.Nested(
|
||||
"ProcessModelInfoSchema", dump_only=True, required=False
|
||||
)
|
||||
marshmallow.fields.Nested("ProcessModelInfoSchema", dump_only=True, required=False)
|
||||
)
|
||||
process_groups = marshmallow.fields.List(
|
||||
marshmallow.fields.Nested("ProcessGroupSchema", dump_only=True, required=False)
|
||||
)
|
||||
|
||||
@post_load
|
||||
def make_process_group(
|
||||
self, data: dict[str, str | bool | int], **kwargs: dict
|
||||
) -> ProcessGroup:
|
||||
def make_process_group(self, data: dict[str, str | bool | int], **kwargs: dict) -> ProcessGroup:
|
||||
"""Make_process_group."""
|
||||
return ProcessGroup(**data) # type: ignore
|
||||
|
|
|
@ -55,12 +55,8 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
|
|||
|
||||
__tablename__ = "process_instance"
|
||||
id: int = db.Column(db.Integer, primary_key=True)
|
||||
process_model_identifier: str = db.Column(
|
||||
db.String(255), nullable=False, index=True
|
||||
)
|
||||
process_model_display_name: str = db.Column(
|
||||
db.String(255), nullable=False, index=True
|
||||
)
|
||||
process_model_identifier: str = db.Column(db.String(255), nullable=False, index=True)
|
||||
process_model_display_name: str = db.Column(db.String(255), nullable=False, index=True)
|
||||
process_initiator_id: int = db.Column(ForeignKey(UserModel.id), nullable=False) # type: ignore
|
||||
process_initiator = relationship("UserModel")
|
||||
|
||||
|
@ -68,9 +64,7 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
|
|||
ForeignKey(BpmnProcessDefinitionModel.id), nullable=True # type: ignore
|
||||
)
|
||||
bpmn_process_definition = relationship(BpmnProcessDefinitionModel)
|
||||
bpmn_process_id: int | None = db.Column(
|
||||
ForeignKey(BpmnProcessModel.id), nullable=True # type: ignore
|
||||
)
|
||||
bpmn_process_id: int | None = db.Column(ForeignKey(BpmnProcessModel.id), nullable=True) # type: ignore
|
||||
bpmn_process = relationship(BpmnProcessModel, cascade="delete")
|
||||
tasks = relationship("TaskModel", cascade="delete") # type: ignore
|
||||
|
||||
|
@ -79,8 +73,7 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
|
|||
active_human_tasks = relationship(
|
||||
"HumanTaskModel",
|
||||
primaryjoin=(
|
||||
"and_(HumanTaskModel.process_instance_id==ProcessInstanceModel.id,"
|
||||
" HumanTaskModel.completed == False)"
|
||||
"and_(HumanTaskModel.process_instance_id==ProcessInstanceModel.id, HumanTaskModel.completed == False)"
|
||||
),
|
||||
) # type: ignore
|
||||
|
||||
|
@ -242,9 +235,7 @@ class ProcessInstanceApiSchema(Schema):
|
|||
next_task = marshmallow.fields.Nested(TaskSchema, dump_only=True, required=False)
|
||||
|
||||
@marshmallow.post_load
|
||||
def make_process_instance(
|
||||
self, data: dict[str, Any], **kwargs: dict
|
||||
) -> ProcessInstanceApi:
|
||||
def make_process_instance(self, data: dict[str, Any], **kwargs: dict) -> ProcessInstanceApi:
|
||||
"""Make_process_instance."""
|
||||
keys = [
|
||||
"id",
|
||||
|
|
|
@ -17,17 +17,13 @@ class ProcessInstanceFileDataModel(SpiffworkflowBaseDBModel):
|
|||
__tablename__ = "process_instance_file_data"
|
||||
|
||||
id: int = db.Column(db.Integer, primary_key=True)
|
||||
process_instance_id: int = db.Column(
|
||||
ForeignKey(ProcessInstanceModel.id), nullable=False # type: ignore
|
||||
)
|
||||
process_instance_id: int = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=False) # type: ignore
|
||||
identifier: str = db.Column(db.String(255), nullable=False)
|
||||
list_index: Optional[int] = db.Column(db.Integer, nullable=True)
|
||||
mimetype: str = db.Column(db.String(255), nullable=False)
|
||||
filename: str = db.Column(db.String(255), nullable=False)
|
||||
# this is not deferred because there is no reason to query this model if you do not want the contents
|
||||
contents: str = db.Column(
|
||||
db.LargeBinary().with_variant(LONGBLOB, "mysql"), nullable=False
|
||||
)
|
||||
contents: str = db.Column(db.LargeBinary().with_variant(LONGBLOB, "mysql"), nullable=False)
|
||||
digest: str = db.Column(db.String(64), nullable=False, index=True)
|
||||
|
||||
updated_at_in_seconds: int = db.Column(db.Integer, nullable=False)
|
||||
|
|
|
@ -13,16 +13,10 @@ class ProcessInstanceMetadataModel(SpiffworkflowBaseDBModel):
|
|||
"""ProcessInstanceMetadataModel."""
|
||||
|
||||
__tablename__ = "process_instance_metadata"
|
||||
__table_args__ = (
|
||||
db.UniqueConstraint(
|
||||
"process_instance_id", "key", name="process_instance_metadata_unique"
|
||||
),
|
||||
)
|
||||
__table_args__ = (db.UniqueConstraint("process_instance_id", "key", name="process_instance_metadata_unique"),)
|
||||
|
||||
id: int = db.Column(db.Integer, primary_key=True)
|
||||
process_instance_id: int = db.Column(
|
||||
ForeignKey(ProcessInstanceModel.id), nullable=False # type: ignore
|
||||
)
|
||||
process_instance_id: int = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=False) # type: ignore
|
||||
key: str = db.Column(db.String(255), nullable=False, index=True)
|
||||
value: str = db.Column(db.String(255), nullable=False)
|
||||
|
||||
|
|
|
@ -22,9 +22,7 @@ class ProcessInstanceQueueModel(SpiffworkflowBaseDBModel):
|
|||
run_at_in_seconds: int = db.Column(db.Integer)
|
||||
priority: int = db.Column(db.Integer)
|
||||
locked_by: Union[str, None] = db.Column(db.String(80), index=True, nullable=True)
|
||||
locked_at_in_seconds: Union[int, None] = db.Column(
|
||||
db.Integer, index=True, nullable=True
|
||||
)
|
||||
locked_at_in_seconds: Union[int, None] = db.Column(db.Integer, index=True, nullable=True)
|
||||
status: str = db.Column(db.String(50), index=True)
|
||||
updated_at_in_seconds: int = db.Column(db.Integer)
|
||||
created_at_in_seconds: int = db.Column(db.Integer)
|
||||
|
|
|
@ -187,9 +187,7 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel):
|
|||
{"Header": "priority", "accessor": "priority"},
|
||||
],
|
||||
"order": "month asc",
|
||||
"filter_by": [
|
||||
{"field_name": "month", "operator": "equals", "field_value": "3"}
|
||||
],
|
||||
"filter_by": [{"field_name": "month", "operator": "equals", "field_value": "3"}],
|
||||
}
|
||||
|
||||
@classmethod
|
||||
|
@ -233,25 +231,19 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel):
|
|||
if substitution_variables is not None:
|
||||
for key, value in substitution_variables.items():
|
||||
if isinstance(value, str) or isinstance(value, int):
|
||||
field_value = str(field_value).replace(
|
||||
"{{" + key + "}}", str(value)
|
||||
)
|
||||
field_value = str(field_value).replace("{{" + key + "}}", str(value))
|
||||
return field_value
|
||||
|
||||
# modeled after https://github.com/suyash248/sqlalchemy-json-querybuilder
|
||||
# just supports "equals" operator for now.
|
||||
# perhaps we will use the database instead of filtering in memory in the future and then we might use this lib directly.
|
||||
def passes_filter(
|
||||
self, process_instance_dict: dict, substitution_variables: dict
|
||||
) -> bool:
|
||||
def passes_filter(self, process_instance_dict: dict, substitution_variables: dict) -> bool:
|
||||
"""Passes_filter."""
|
||||
if "filter_by" in self.report_metadata:
|
||||
for filter_by in self.report_metadata["filter_by"]:
|
||||
field_name = filter_by["field_name"]
|
||||
operator = filter_by["operator"]
|
||||
field_value = self.with_substitutions(
|
||||
filter_by["field_value"], substitution_variables
|
||||
)
|
||||
field_value = self.with_substitutions(filter_by["field_value"], substitution_variables)
|
||||
if operator == "equals":
|
||||
if str(process_instance_dict.get(field_name)) != str(field_value):
|
||||
return False
|
||||
|
@ -274,9 +266,7 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel):
|
|||
sort_value = process_instance_dict.get(order_by_item)
|
||||
comparison_values.append(Reversor(sort_value))
|
||||
else:
|
||||
sort_value = cast(
|
||||
Optional[str], process_instance_dict.get(order_by_item)
|
||||
)
|
||||
sort_value = cast(Optional[str], process_instance_dict.get(order_by_item))
|
||||
comparison_values.append(sort_value)
|
||||
return comparison_values
|
||||
|
||||
|
@ -307,20 +297,14 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel):
|
|||
results = self.order_things(results)
|
||||
|
||||
if "columns" in self.report_metadata:
|
||||
column_keys_to_keep = [
|
||||
c["accessor"] for c in self.report_metadata["columns"]
|
||||
]
|
||||
column_keys_to_keep = [c["accessor"] for c in self.report_metadata["columns"]]
|
||||
|
||||
pruned_results = []
|
||||
for result in results:
|
||||
dict_you_want = {
|
||||
your_key: result[your_key]
|
||||
for your_key in column_keys_to_keep
|
||||
if result.get(your_key)
|
||||
your_key: result[your_key] for your_key in column_keys_to_keep if result.get(your_key)
|
||||
}
|
||||
pruned_results.append(dict_you_want)
|
||||
results = pruned_results
|
||||
|
||||
return ProcessInstanceReportResult(
|
||||
report_metadata=self.report_metadata, results=results
|
||||
)
|
||||
return ProcessInstanceReportResult(report_metadata=self.report_metadata, results=results)
|
||||
|
|
|
@ -89,9 +89,7 @@ class ProcessModelInfoSchema(Schema):
|
|||
primary_process_id = marshmallow.fields.String(allow_none=True)
|
||||
files = marshmallow.fields.List(marshmallow.fields.Nested("FileSchema"))
|
||||
fault_or_suspend_on_exception = marshmallow.fields.String()
|
||||
exception_notification_addresses = marshmallow.fields.List(
|
||||
marshmallow.fields.String
|
||||
)
|
||||
exception_notification_addresses = marshmallow.fields.List(marshmallow.fields.String)
|
||||
metadata_extraction_paths = marshmallow.fields.List(
|
||||
marshmallow.fields.Dict(
|
||||
keys=marshmallow.fields.Str(required=False),
|
||||
|
@ -101,8 +99,6 @@ class ProcessModelInfoSchema(Schema):
|
|||
)
|
||||
|
||||
@post_load
|
||||
def make_spec(
|
||||
self, data: dict[str, str | bool | int | NotificationType], **_: Any
|
||||
) -> ProcessModelInfo:
|
||||
def make_spec(self, data: dict[str, str | bool | int | NotificationType], **_: Any) -> ProcessModelInfo:
|
||||
"""Make_spec."""
|
||||
return ProcessModelInfo(**data) # type: ignore
|
||||
|
|
|
@ -41,9 +41,7 @@ class SpecReferenceCache(SpiffworkflowBaseDBModel):
|
|||
"""A cache of information about all the Processes and Decisions defined in all files."""
|
||||
|
||||
__tablename__ = "spec_reference_cache"
|
||||
__table_args__ = (
|
||||
UniqueConstraint("identifier", "type", name="_identifier_type_unique"),
|
||||
)
|
||||
__table_args__ = (UniqueConstraint("identifier", "type", name="_identifier_type_unique"),)
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
identifier = db.Column(db.String(255), index=True)
|
||||
display_name = db.Column(db.String(255), index=True)
|
||||
|
|
|
@ -16,16 +16,10 @@ class SpiffStepDetailsModel(SpiffworkflowBaseDBModel):
|
|||
"""SpiffStepDetailsModel."""
|
||||
|
||||
__tablename__ = "spiff_step_details"
|
||||
__table_args__ = (
|
||||
UniqueConstraint(
|
||||
"process_instance_id", "spiff_step", name="process_instance_id_spiff_step"
|
||||
),
|
||||
)
|
||||
__table_args__ = (UniqueConstraint("process_instance_id", "spiff_step", name="process_instance_id_spiff_step"),)
|
||||
|
||||
id: int = db.Column(db.Integer, primary_key=True)
|
||||
process_instance_id: int = db.Column(
|
||||
ForeignKey(ProcessInstanceModel.id), nullable=False # type: ignore
|
||||
)
|
||||
process_instance_id: int = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=False) # type: ignore
|
||||
spiff_step: int = db.Column(db.Integer, nullable=False)
|
||||
task_json: dict = deferred(db.Column(db.JSON, nullable=False)) # type: ignore
|
||||
task_id: str = db.Column(db.String(50), nullable=False)
|
||||
|
|
|
@ -45,17 +45,11 @@ class TaskModel(SpiffworkflowBaseDBModel):
|
|||
__tablename__ = "task"
|
||||
id: int = db.Column(db.Integer, primary_key=True)
|
||||
guid: str = db.Column(db.String(36), nullable=False, unique=True, index=True)
|
||||
bpmn_process_id: int = db.Column(
|
||||
ForeignKey(BpmnProcessModel.id), nullable=False # type: ignore
|
||||
)
|
||||
process_instance_id: int = db.Column(
|
||||
ForeignKey("process_instance.id"), nullable=False
|
||||
)
|
||||
bpmn_process_id: int = db.Column(ForeignKey(BpmnProcessModel.id), nullable=False) # type: ignore
|
||||
process_instance_id: int = db.Column(ForeignKey("process_instance.id"), nullable=False)
|
||||
|
||||
# find this by looking up the "workflow_name" and "task_spec" from the properties_json
|
||||
task_definition_id: int = db.Column(
|
||||
ForeignKey(TaskDefinitionModel.id), nullable=False # type: ignore
|
||||
)
|
||||
task_definition_id: int = db.Column(ForeignKey(TaskDefinitionModel.id), nullable=False) # type: ignore
|
||||
task_definition = relationship("TaskDefinitionModel")
|
||||
|
||||
state: str = db.Column(db.String(10), nullable=False)
|
||||
|
@ -137,15 +131,9 @@ class Task:
|
|||
self.form_schema = form_schema
|
||||
self.form_ui_schema = form_ui_schema
|
||||
|
||||
self.multi_instance_type = (
|
||||
multi_instance_type # Some tasks have a repeat behavior.
|
||||
)
|
||||
self.multi_instance_count = (
|
||||
multi_instance_count # This is the number of times the task could repeat.
|
||||
)
|
||||
self.multi_instance_index = (
|
||||
multi_instance_index # And the index of the currently repeating task.
|
||||
)
|
||||
self.multi_instance_type = multi_instance_type # Some tasks have a repeat behavior.
|
||||
self.multi_instance_count = multi_instance_count # This is the number of times the task could repeat.
|
||||
self.multi_instance_index = multi_instance_index # And the index of the currently repeating task.
|
||||
self.process_identifier = process_identifier
|
||||
|
||||
self.properties = properties # Arbitrary extension properties from BPMN editor.
|
||||
|
@ -243,9 +231,7 @@ class FormFieldSchema(Schema):
|
|||
default_value = marshmallow.fields.String(required=False, allow_none=True)
|
||||
options = marshmallow.fields.List(marshmallow.fields.Nested(OptionSchema))
|
||||
validation = marshmallow.fields.List(marshmallow.fields.Nested(ValidationSchema))
|
||||
properties = marshmallow.fields.List(
|
||||
marshmallow.fields.Nested(FormFieldPropertySchema)
|
||||
)
|
||||
properties = marshmallow.fields.List(marshmallow.fields.Nested(FormFieldPropertySchema))
|
||||
|
||||
|
||||
# class FormSchema(Schema):
|
||||
|
|
|
@ -29,9 +29,7 @@ class UserModel(SpiffworkflowBaseDBModel):
|
|||
id: int = db.Column(db.Integer, primary_key=True)
|
||||
username: str = db.Column(db.String(255), nullable=False, unique=True)
|
||||
|
||||
service = db.Column(
|
||||
db.String(255), nullable=False, unique=False
|
||||
) # not 'openid' -- google, aws
|
||||
service = db.Column(db.String(255), nullable=False, unique=False) # not 'openid' -- google, aws
|
||||
service_id = db.Column(db.String(255), nullable=False, unique=False)
|
||||
display_name = db.Column(db.String(255))
|
||||
email = db.Column(db.String(255))
|
||||
|
|
|
@ -12,9 +12,7 @@ class UserGroupAssignmentModel(SpiffworkflowBaseDBModel):
|
|||
"""UserGroupAssignmentModel."""
|
||||
|
||||
__tablename__ = "user_group_assignment"
|
||||
__table_args__ = (
|
||||
db.UniqueConstraint("user_id", "group_id", name="user_group_assignment_unique"),
|
||||
)
|
||||
__table_args__ = (db.UniqueConstraint("user_id", "group_id", name="user_group_assignment_unique"),)
|
||||
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
user_id = db.Column(ForeignKey(UserModel.id), nullable=False) # type: ignore
|
||||
|
|
|
@@ -15,11 +15,7 @@ class UserGroupAssignmentWaitingModel(SpiffworkflowBaseDBModel):

MATCH_ALL_USERS = "*"
__tablename__ = "user_group_assignment_waiting"
__table_args__ = (
db.UniqueConstraint(
"username", "group_id", name="user_group_assignment_staged_unique"
),
)
__table_args__ = (db.UniqueConstraint("username", "group_id", name="user_group_assignment_staged_unique"),)

id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(255), nullable=False)

@@ -28,9 +28,7 @@ def message_instance_list(
message_instances_query = MessageInstanceModel.query

if process_instance_id:
message_instances_query = message_instances_query.filter_by(
process_instance_id=process_instance_id
)
message_instances_query = message_instances_query.filter_by(process_instance_id=process_instance_id)

message_instances = (
message_instances_query.order_by(

@@ -70,10 +68,7 @@ def message_send(
raise (
ApiError(
error_code="missing_payload",
message=(
"Please include a 'payload' in the JSON body that contains the"
" message contents."
),
message="Please include a 'payload' in the JSON body that contains the message contents.",
status_code=400,
)
)

@@ -111,9 +106,7 @@ def message_send(
)
)

process_instance = ProcessInstanceModel.query.filter_by(
id=receiver_message.process_instance_id
).first()
process_instance = ProcessInstanceModel.query.filter_by(id=receiver_message.process_instance_id).first()
return Response(
json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
status=200,

@@ -20,9 +20,7 @@ from flask import request
from flask import url_for
from werkzeug.wrappers import Response

openid_blueprint = Blueprint(
"openid", __name__, template_folder="templates", static_folder="static"
)
openid_blueprint = Blueprint("openid", __name__, template_folder="templates", static_folder="static")

OPEN_ID_CODE = ":this_is_not_secure_do_not_use_in_production"

@@ -60,10 +58,7 @@ def auth() -> str:
def form_submit() -> Any:
"""Handles the login form submission."""
users = get_users()
if (
request.values["Uname"] in users
and request.values["Pass"] == users[request.values["Uname"]]["password"]
):
if request.values["Uname"] in users and request.values["Pass"] == users[request.values["Uname"]]["password"]:
# Redirect back to the end user with some detailed information
state = request.values.get("state")
data = {

@@ -46,9 +46,7 @@ def permissions_check(body: Dict[str, Dict[str, list[str]]]) -> flask.wrappers.R
raise (
ApiError(
error_code="could_not_requests_to_check",
message=(
"The key 'requests_to_check' not found at root of request body."
),
message="The key 'requests_to_check' not found at root of request body.",
status_code=400,
)
)

@@ -60,9 +58,7 @@ def permissions_check(body: Dict[str, Dict[str, list[str]]]) -> flask.wrappers.R
response_dict[target_uri] = {}

for http_method in http_methods:
permission_string = AuthorizationService.get_permission_from_http_method(
http_method
)
permission_string = AuthorizationService.get_permission_from_http_method(http_method)
if permission_string:
has_permission = AuthorizationService.user_has_permission(
user=g.user,

@@ -98,10 +94,7 @@ def _process_data_fetcher(
if file_data is None:
raise ApiError(
error_code="process_instance_file_data_not_found",
message=(
"Could not find file data related to the digest:"
f" {process_data_identifier}"
),
message=f"Could not find file data related to the digest: {process_data_identifier}",
)
mimetype = file_data.mimetype
filename = file_data.filename

@@ -169,9 +162,7 @@ def github_webhook_receive(body: Dict) -> Response:
auth_header = request.headers.get("X-Hub-Signature-256")
AuthorizationService.verify_sha256_token(auth_header)
result = GitService.handle_web_hook(body)
return Response(
json.dumps({"git_pull": result}), status=200, mimetype="application/json"
)
return Response(json.dumps({"git_pull": result}), status=200, mimetype="application/json")


def task_data_update(

@@ -181,9 +172,7 @@ def task_data_update(
body: Dict,
) -> Response:
"""Update task data."""
process_instance = ProcessInstanceModel.query.filter(
ProcessInstanceModel.id == int(process_instance_id)
).first()
process_instance = ProcessInstanceModel.query.filter(ProcessInstanceModel.id == int(process_instance_id)).first()
if process_instance:
if process_instance.status != "suspended":
raise ProcessInstanceTaskDataCannotBeUpdatedError(

@@ -195,10 +184,7 @@ def task_data_update(
if process_instance_data is None:
raise ApiError(
error_code="process_instance_data_not_found",
message=(
"Could not find task data related to process instance:"
f" {process_instance.id}"
),
message=f"Could not find task data related to process instance: {process_instance.id}",
)
process_instance_data_dict = json.loads(process_instance_data.runtime_json)

@@ -206,12 +192,8 @@ def task_data_update(
new_task_data_str: str = body["new_task_data"]
new_task_data_dict = json.loads(new_task_data_str)
if task_id in process_instance_data_dict["tasks"]:
process_instance_data_dict["tasks"][task_id][
"data"
] = new_task_data_dict
process_instance_data.runtime_json = json.dumps(
process_instance_data_dict
)
process_instance_data_dict["tasks"][task_id]["data"] = new_task_data_dict
process_instance_data.runtime_json = json.dumps(process_instance_data_dict)
db.session.add(process_instance_data)
try:
db.session.commit()

@@ -224,18 +206,12 @@ def task_data_update(
else:
raise ApiError(
error_code="update_task_data_error",
message=(
f"Could not find Task: {task_id} in Instance:"
f" {process_instance_id}."
),
message=f"Could not find Task: {task_id} in Instance: {process_instance_id}.",
)
else:
raise ApiError(
error_code="update_task_data_error",
message=(
f"Could not update task data for Instance: {process_instance_id}, and"
f" Task: {task_id}."
),
message=f"Could not update task data for Instance: {process_instance_id}, and Task: {task_id}.",
)
return Response(
json.dumps(ProcessInstanceModelSchema().dump(process_instance)),

@@ -268,9 +244,7 @@ def send_bpmn_event(
body: Dict,
) -> Response:
"""Send a bpmn event to a workflow."""
process_instance = ProcessInstanceModel.query.filter(
ProcessInstanceModel.id == int(process_instance_id)
).first()
process_instance = ProcessInstanceModel.query.filter(ProcessInstanceModel.id == int(process_instance_id)).first()
if process_instance:
processor = ProcessInstanceProcessor(process_instance)
processor.send_bpmn_event(body)

@@ -294,18 +268,14 @@ def manual_complete_task(
) -> Response:
"""Mark a task complete without executing it."""
execute = body.get("execute", True)
process_instance = ProcessInstanceModel.query.filter(
ProcessInstanceModel.id == int(process_instance_id)
).first()
process_instance = ProcessInstanceModel.query.filter(ProcessInstanceModel.id == int(process_instance_id)).first()
if process_instance:
processor = ProcessInstanceProcessor(process_instance)
processor.manual_complete_task(task_id, execute)
else:
raise ApiError(
error_code="complete_task",
message=(
f"Could not complete Task {task_id} in Instance {process_instance_id}"
),
message=f"Could not complete Task {task_id} in Instance {process_instance_id}",
)
return Response(
json.dumps(ProcessInstanceModelSchema().dump(process_instance)),

@@ -332,9 +302,7 @@ def _find_process_instance_by_id_or_raise(
process_instance_id: int,
) -> ProcessInstanceModel:
"""Find_process_instance_by_id_or_raise."""
process_instance_query = ProcessInstanceModel.query.filter_by(
id=process_instance_id
)
process_instance_query = ProcessInstanceModel.query.filter_by(id=process_instance_id)

# we had a frustrating session trying to do joins and access columns from two tables. here's some notes for our future selves:
# this returns an object that allows you to do: process_instance.UserModel.username

@@ -44,9 +44,7 @@ def process_group_create(body: dict) -> flask.wrappers.Response:
)

ProcessModelService.add_process_group(process_group)
_commit_and_push_to_git(
f"User: {g.user.username} added process group {process_group.id}"
)
_commit_and_push_to_git(f"User: {g.user.username} added process group {process_group.id}")
return make_response(jsonify(process_group), 201)


@@ -63,22 +61,14 @@ def process_group_delete(modified_process_group_id: str) -> flask.wrappers.Respo
status_code=400,
) from exception

_commit_and_push_to_git(
f"User: {g.user.username} deleted process group {process_group_id}"
)
_commit_and_push_to_git(f"User: {g.user.username} deleted process group {process_group_id}")
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")


def process_group_update(
modified_process_group_id: str, body: dict
) -> flask.wrappers.Response:
def process_group_update(modified_process_group_id: str, body: dict) -> flask.wrappers.Response:
"""Process Group Update."""
body_include_list = ["display_name", "description"]
body_filtered = {
include_item: body[include_item]
for include_item in body_include_list
if include_item in body
}
body_filtered = {include_item: body[include_item] for include_item in body_include_list if include_item in body}

process_group_id = _un_modify_modified_process_model_id(modified_process_group_id)
if not ProcessModelService.is_process_group_identifier(process_group_id):

@@ -90,9 +80,7 @@ def process_group_update(

process_group = ProcessGroup(id=process_group_id, **body_filtered)
ProcessModelService.update_process_group(process_group)
_commit_and_push_to_git(
f"User: {g.user.username} updated process group {process_group_id}"
)
_commit_and_push_to_git(f"User: {g.user.username} updated process group {process_group_id}")
return make_response(jsonify(process_group), 200)

@@ -101,14 +89,10 @@ def process_group_list(
) -> flask.wrappers.Response:
"""Process_group_list."""
if process_group_identifier is not None:
process_groups = ProcessModelService.get_process_groups(
process_group_identifier
)
process_groups = ProcessModelService.get_process_groups(process_group_identifier)
else:
process_groups = ProcessModelService.get_process_groups()
batch = ProcessModelService().get_batch(
items=process_groups, page=page, per_page=per_page
)
batch = ProcessModelService().get_batch(items=process_groups, page=page, per_page=per_page)
pages = len(process_groups) // per_page
remainder = len(process_groups) % per_page
if remainder > 0:

@@ -141,24 +125,15 @@ def process_group_show(
)
) from exception

process_group.parent_groups = ProcessModelService.get_parent_group_array(
process_group.id
)
process_group.parent_groups = ProcessModelService.get_parent_group_array(process_group.id)
return make_response(jsonify(process_group), 200)


def process_group_move(
modified_process_group_identifier: str, new_location: str
) -> flask.wrappers.Response:
def process_group_move(modified_process_group_identifier: str, new_location: str) -> flask.wrappers.Response:
"""Process_group_move."""
original_process_group_id = _un_modify_modified_process_model_id(
modified_process_group_identifier
)
new_process_group = ProcessModelService().process_group_move(
original_process_group_id, new_location
)
original_process_group_id = _un_modify_modified_process_model_id(modified_process_group_identifier)
new_process_group = ProcessModelService().process_group_move(original_process_group_id, new_location)
_commit_and_push_to_git(
f"User: {g.user.username} moved process group {original_process_group_id} to"
f" {new_process_group.id}"
f"User: {g.user.username} moved process group {original_process_group_id} to {new_process_group.id}"
)
return make_response(jsonify(new_process_group), 200)

@@ -88,9 +88,7 @@ def process_instance_create(
modified_process_model_identifier: str,
) -> flask.wrappers.Response:
"""Create_process_instance."""
process_model_identifier = _un_modify_modified_process_model_id(
modified_process_model_identifier
)
process_model_identifier = _un_modify_modified_process_model_id(modified_process_model_identifier)

process_model = _get_process_model(process_model_identifier)
if process_model.primary_file_name is None:

@@ -103,10 +101,8 @@ def process_instance_create(
status_code=400,
)

process_instance = (
ProcessInstanceService.create_process_instance_from_process_model_identifier(
process_model_identifier, g.user
)
process_instance = ProcessInstanceService.create_process_instance_from_process_model_identifier(
process_model_identifier, g.user
)
ProcessInstanceQueueService.enqueue(process_instance)
return Response(

@@ -126,10 +122,7 @@ def process_instance_run(
if process_instance.status != "not_started":
raise ApiError(
error_code="process_instance_not_runnable",
message=(
f"Process Instance ({process_instance.id}) is currently running or has"
" already run."
),
message=f"Process Instance ({process_instance.id}) is currently running or has already run.",
status_code=400,
)

@@ -163,15 +156,11 @@ def process_instance_run(
if not current_app.config["SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER"]:
MessageService.correlate_all_message_instances()

process_instance_api = ProcessInstanceService.processor_to_process_instance_api(
processor
)
process_instance_api = ProcessInstanceService.processor_to_process_instance_api(processor)
process_instance_data = processor.get_data()
process_instance_metadata = ProcessInstanceApiSchema().dump(process_instance_api)
process_instance_metadata["data"] = process_instance_data
return Response(
json.dumps(process_instance_metadata), status=200, mimetype="application/json"
)
return Response(json.dumps(process_instance_metadata), status=200, mimetype="application/json")


def process_instance_terminate(

@@ -216,9 +205,7 @@ def process_instance_log_list(
# to make sure the process instance exists
process_instance = _find_process_instance_by_id_or_raise(process_instance_id)

log_query = SpiffLoggingModel.query.filter(
SpiffLoggingModel.process_instance_id == process_instance.id
)
log_query = SpiffLoggingModel.query.filter(SpiffLoggingModel.process_instance_id == process_instance.id)
if not detailed:
log_query = log_query.filter(
# 1. this was the previous implementation, where we only show completed tasks and skipped tasks.

@@ -231,9 +218,7 @@ def process_instance_log_list(
# we decided to remove them, since they get really chatty when there are lots of subprocesses and call activities.
and_(
SpiffLoggingModel.message.in_(["State change to COMPLETED"]),  # type: ignore
SpiffLoggingModel.bpmn_task_type.in_(  # type: ignore
["Default Throwing Event"]
),
SpiffLoggingModel.bpmn_task_type.in_(["Default Throwing Event"]),  # type: ignore
)
)

@@ -317,9 +302,7 @@ def process_instance_list(
report_filter_by: Optional[str] = None,
) -> flask.wrappers.Response:
"""Process_instance_list."""
process_instance_report = ProcessInstanceReportService.report_with_identifier(
g.user, report_id, report_identifier
)
process_instance_report = ProcessInstanceReportService.report_with_identifier(g.user, report_id, report_identifier)

report_column_list = None
if report_columns:

@@ -343,21 +326,19 @@ def process_instance_list(
report_filter_by_list=report_filter_by_list,
)
else:
report_filter = (
ProcessInstanceReportService.filter_from_metadata_with_overrides(
process_instance_report=process_instance_report,
process_model_identifier=process_model_identifier,
user_group_identifier=user_group_identifier,
start_from=start_from,
start_to=start_to,
end_from=end_from,
end_to=end_to,
process_status=process_status,
with_relation_to_me=with_relation_to_me,
process_initiator_username=process_initiator_username,
report_column_list=report_column_list,
report_filter_by_list=report_filter_by_list,
)
report_filter = ProcessInstanceReportService.filter_from_metadata_with_overrides(
process_instance_report=process_instance_report,
process_model_identifier=process_model_identifier,
user_group_identifier=user_group_identifier,
start_from=start_from,
start_to=start_to,
end_from=end_from,
end_to=end_to,
process_status=process_status,
with_relation_to_me=with_relation_to_me,
process_initiator_username=process_initiator_username,
report_column_list=report_column_list,
report_filter_by_list=report_filter_by_list,
)

response_json = ProcessInstanceReportService.run_process_instance_report(

@@ -381,8 +362,7 @@ def process_instance_report_column_list() -> flask.wrappers.Response:
.all()
)
columns_for_metadata_strings = [
{"Header": i[0], "accessor": i[0], "filterable": True}
for i in columns_for_metadata
{"Header": i[0], "accessor": i[0], "filterable": True} for i in columns_for_metadata
]
return make_response(jsonify(table_columns + columns_for_metadata_strings), 200)

@@ -429,23 +409,15 @@ def process_instance_delete(

# (Pdb) db.session.delete
# <bound method delete of <sqlalchemy.orm.scoping.scoped_session object at 0x103eaab30>>
db.session.query(SpiffLoggingModel).filter_by(
process_instance_id=process_instance.id
).delete()
db.session.query(SpiffStepDetailsModel).filter_by(
process_instance_id=process_instance.id
).delete()
db.session.query(ProcessInstanceQueueModel).filter_by(
process_instance_id=process_instance.id
).delete()
db.session.query(SpiffLoggingModel).filter_by(process_instance_id=process_instance.id).delete()
db.session.query(SpiffStepDetailsModel).filter_by(process_instance_id=process_instance.id).delete()
db.session.query(ProcessInstanceQueueModel).filter_by(process_instance_id=process_instance.id).delete()
db.session.delete(process_instance)
db.session.commit()
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")


def process_instance_report_list(
page: int = 1, per_page: int = 100
) -> flask.wrappers.Response:
def process_instance_report_list(page: int = 1, per_page: int = 100) -> flask.wrappers.Response:
"""Process_instance_report_list."""
process_instance_reports = ProcessInstanceReportModel.query.filter_by(
created_by_id=g.user.id,

@@ -530,9 +502,7 @@ def process_instance_report_show(
)

substitution_variables = request.args.to_dict()
result_dict = process_instance_report.generate_report(
process_instances.items, substitution_variables
)
result_dict = process_instance_report.generate_report(process_instances.items, substitution_variables)

# update this if we go back to a database query instead of filtering in memory
result_dict["pagination"] = {

@@ -593,9 +563,7 @@ def process_instance_task_list(
)

if spiff_step > 0:
step_detail_query = step_detail_query.filter(
SpiffStepDetailsModel.spiff_step <= spiff_step
)
step_detail_query = step_detail_query.filter(SpiffStepDetailsModel.spiff_step <= spiff_step)

step_details = step_detail_query.all()

@@ -619,9 +587,7 @@ def process_instance_task_list(
for spiff_task in subprocess["tasks"].values():
restore_task(spiff_task, last_change)

bpmn_process_instance = ProcessInstanceProcessor._serializer.workflow_from_dict(
full_bpmn_process_dict
)
bpmn_process_instance = ProcessInstanceProcessor._serializer.workflow_from_dict(full_bpmn_process_dict)
if spiff_step > 0:
bpmn_process_instance.complete_task_from_id(UUID(step_details[-1].task_id))
for subprocess_id, subprocess in bpmn_process_instance.subprocesses.items():

@@ -653,8 +619,7 @@ def process_instance_task_list(
current_tasks[row_id] = spiff_task
if (
row_id not in spiff_tasks_by_process_id_and_task_name
or spiff_task.state
> spiff_tasks_by_process_id_and_task_name[row_id].state
or spiff_task.state > spiff_tasks_by_process_id_and_task_name[row_id].state
):
spiff_tasks_by_process_id_and_task_name[row_id] = spiff_task
spiff_tasks_by_process_id_and_task_name.update(current_tasks)

@@ -665,9 +630,7 @@ def process_instance_task_list(
task_spiff_step: Optional[int] = None
if str(spiff_task.id) in steps_by_id:
task_spiff_step = steps_by_id[str(spiff_task.id)].spiff_step
calling_subprocess_task_id = subprocesses_by_child_task_ids.get(
str(spiff_task.id), None
)
calling_subprocess_task_id = subprocesses_by_child_task_ids.get(str(spiff_task.id), None)
task = ProcessInstanceService.spiff_task_to_api_task(
processor,
spiff_task,

@@ -698,14 +661,10 @@ def process_instance_find_by_id(
) -> flask.wrappers.Response:
"""Process_instance_find_by_id."""
process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
modified_process_model_identifier = (
ProcessModelInfo.modify_process_identifier_for_path_param(
process_instance.process_model_identifier
)
)
process_instance_uri = (
f"/process-instances/{modified_process_model_identifier}/{process_instance.id}"
modified_process_model_identifier = ProcessModelInfo.modify_process_identifier_for_path_param(
process_instance.process_model_identifier
)
process_instance_uri = f"/process-instances/{modified_process_model_identifier}/{process_instance.id}"
has_permission = AuthorizationService.user_has_permission(
user=g.user,
permission="read",

@@ -739,32 +698,22 @@ def _get_process_instance(
process_model_with_diagram = None
name_of_file_with_diagram = None
if process_identifier:
spec_reference = SpecReferenceCache.query.filter_by(
identifier=process_identifier, type="process"
).first()
spec_reference = SpecReferenceCache.query.filter_by(identifier=process_identifier, type="process").first()
if spec_reference is None:
raise SpecReferenceNotFoundError(
"Could not find given process identifier in the cache:"
f" {process_identifier}"
f"Could not find given process identifier in the cache: {process_identifier}"
)

process_model_with_diagram = ProcessModelService.get_process_model(
spec_reference.process_model_id
)
process_model_with_diagram = ProcessModelService.get_process_model(spec_reference.process_model_id)
name_of_file_with_diagram = spec_reference.file_name
process_instance.process_model_with_diagram_identifier = (
process_model_with_diagram.id
)
process_instance.process_model_with_diagram_identifier = process_model_with_diagram.id
else:
process_model_with_diagram = _get_process_model(process_model_identifier)
if process_model_with_diagram.primary_file_name:
name_of_file_with_diagram = process_model_with_diagram.primary_file_name

if process_model_with_diagram and name_of_file_with_diagram:
if (
process_instance.bpmn_version_control_identifier
== current_version_control_revision
):
if process_instance.bpmn_version_control_identifier == current_version_control_revision:
bpmn_xml_file_contents = SpecFileService.get_data(
process_model_with_diagram, name_of_file_with_diagram
).decode("utf-8")

@@ -807,10 +756,7 @@ def _find_process_instance_for_me_or_raise(
raise (
ApiError(
error_code="process_instance_cannot_be_found",
message=(
f"Process instance with id {process_instance_id} cannot be found"
" that is associated with you."
),
message=f"Process instance with id {process_instance_id} cannot be found that is associated with you.",
status_code=400,
)
)

@@ -63,11 +63,7 @@ def process_model_create(
"fault_or_suspend_on_exception",
"exception_notification_addresses",
]
body_filtered = {
include_item: body[include_item]
for include_item in body_include_list
if include_item in body
}
body_filtered = {include_item: body[include_item] for include_item in body_include_list if include_item in body}

_get_process_group_from_modified_identifier(modified_process_group_id)

@@ -82,25 +78,19 @@ def process_model_create(
if ProcessModelService.is_process_model_identifier(process_model_info.id):
raise ApiError(
error_code="process_model_with_id_already_exists",
message=(
f"Process Model with given id already exists: {process_model_info.id}"
),
message=f"Process Model with given id already exists: {process_model_info.id}",
status_code=400,
)

if ProcessModelService.is_process_group_identifier(process_model_info.id):
raise ApiError(
error_code="process_group_with_id_already_exists",
message=(
f"Process Group with given id already exists: {process_model_info.id}"
),
message=f"Process Group with given id already exists: {process_model_info.id}",
status_code=400,
)

ProcessModelService.add_process_model(process_model_info)
_commit_and_push_to_git(
f"User: {g.user.username} created process model {process_model_info.id}"
)
_commit_and_push_to_git(f"User: {g.user.username} created process model {process_model_info.id}")
return Response(
json.dumps(ProcessModelInfoSchema().dump(process_model_info)),
status=201,

@@ -122,9 +112,7 @@ def process_model_delete(
status_code=400,
) from exception

_commit_and_push_to_git(
f"User: {g.user.username} deleted process model {process_model_identifier}"
)
_commit_and_push_to_git(f"User: {g.user.username} deleted process model {process_model_identifier}")
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")


@@ -143,11 +131,7 @@ def process_model_update(
"fault_or_suspend_on_exception",
"exception_notification_addresses",
]
body_filtered = {
include_item: body[include_item]
for include_item in body_include_list
if include_item in body
}
body_filtered = {include_item: body[include_item] for include_item in body_include_list if include_item in body}

process_model = _get_process_model(process_model_identifier)

@@ -156,10 +140,7 @@ def process_model_update(
# All we really need this for is to get the process id from a bpmn file so maybe that could
# all be moved to FileSystemService.
update_primary_bpmn_file = False
if (
"primary_file_name" in body_filtered
and "primary_process_id" not in body_filtered
):
if "primary_file_name" in body_filtered and "primary_process_id" not in body_filtered:
if process_model.primary_file_name != body_filtered["primary_file_name"]:
update_primary_bpmn_file = True

@@ -167,22 +148,14 @@ def process_model_update(

# update the file to ensure we get the correct process id if the primary file changed.
if update_primary_bpmn_file and process_model.primary_file_name:
primary_file_contents = SpecFileService.get_data(
process_model, process_model.primary_file_name
)
SpecFileService.update_file(
process_model, process_model.primary_file_name, primary_file_contents
)
primary_file_contents = SpecFileService.get_data(process_model, process_model.primary_file_name)
SpecFileService.update_file(process_model, process_model.primary_file_name, primary_file_contents)

_commit_and_push_to_git(
f"User: {g.user.username} updated process model {process_model_identifier}"
)
_commit_and_push_to_git(f"User: {g.user.username} updated process model {process_model_identifier}")
return ProcessModelInfoSchema().dump(process_model)


def process_model_show(
modified_process_model_identifier: str, include_file_references: bool = False
) -> Any:
def process_model_show(modified_process_model_identifier: str, include_file_references: bool = False) -> Any:
"""Process_model_show."""
process_model_identifier = modified_process_model_identifier.replace(":", "/")
process_model = _get_process_model(process_model_identifier)

@@ -194,13 +167,9 @@ def process_model_show(

if include_file_references:
for file in process_model.files:
file.references = SpecFileService.get_references_for_file(
file, process_model
)
file.references = SpecFileService.get_references_for_file(file, process_model)

process_model.parent_groups = ProcessModelService.get_parent_group_array(
process_model.id
)
process_model.parent_groups = ProcessModelService.get_parent_group_array(process_model.id)
try:
current_git_revision = GitService.get_current_revision()
except GitCommandError:

@@ -210,19 +179,12 @@ def process_model_show(
return make_response(jsonify(process_model), 200)


def process_model_move(
modified_process_model_identifier: str, new_location: str
) -> flask.wrappers.Response:
def process_model_move(modified_process_model_identifier: str, new_location: str) -> flask.wrappers.Response:
"""Process_model_move."""
original_process_model_id = _un_modify_modified_process_model_id(
modified_process_model_identifier
)
new_process_model = ProcessModelService().process_model_move(
original_process_model_id, new_location
)
original_process_model_id = _un_modify_modified_process_model_id(modified_process_model_identifier)
new_process_model = ProcessModelService().process_model_move(original_process_model_id, new_location)
_commit_and_push_to_git(
f"User: {g.user.username} moved process model {original_process_model_id} to"
f" {new_process_model.id}"
f"User: {g.user.username} moved process model {original_process_model_id} to {new_process_model.id}"
)
return make_response(jsonify(new_process_model), 200)

@@ -232,17 +194,13 @@ def process_model_publish(
) -> flask.wrappers.Response:
"""Process_model_publish."""
if branch_to_update is None:
branch_to_update = current_app.config[
"SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH"
]
branch_to_update = current_app.config["SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH"]
if branch_to_update is None:
raise MissingGitConfigsError(
"Missing config for SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH. "
"This is required for publishing process models"
)
process_model_identifier = _un_modify_modified_process_model_id(
modified_process_model_identifier
)
process_model_identifier = _un_modify_modified_process_model_id(modified_process_model_identifier)
pr_url = GitService().publish(process_model_identifier, branch_to_update)
data = {"ok": True, "pr_url": pr_url}
return Response(json.dumps(data), status=200, mimetype="application/json")

@@ -262,21 +220,15 @@ def process_model_list(
recursive=recursive,
filter_runnable_by_user=filter_runnable_by_user,
)
process_models_to_return = ProcessModelService().get_batch(
process_models, page=page, per_page=per_page
)
process_models_to_return = ProcessModelService().get_batch(process_models, page=page, per_page=per_page)

if include_parent_groups:
process_group_cache = IdToProcessGroupMapping({})
for process_model in process_models_to_return:
parent_group_lites_with_cache = (
ProcessModelService.get_parent_group_array_and_cache_it(
process_model.id, process_group_cache
)
parent_group_lites_with_cache = ProcessModelService.get_parent_group_array_and_cache_it(
process_model.id, process_group_cache
)
process_model.parent_groups = parent_group_lites_with_cache[
"process_groups"
]
process_model.parent_groups = parent_group_lites_with_cache["process_groups"]

pages = len(process_models) // per_page
remainder = len(process_models) % per_page

@@ -293,19 +245,13 @@ def process_model_list(
return make_response(jsonify(response_json), 200)


def process_model_file_update(
modified_process_model_identifier: str, file_name: str
) -> flask.wrappers.Response:
def process_model_file_update(modified_process_model_identifier: str, file_name: str) -> flask.wrappers.Response:
"""Process_model_file_update."""
message = f"User: {g.user.username} clicked save for"
return _create_or_update_process_model_file(
modified_process_model_identifier, message, 200
)
return _create_or_update_process_model_file(modified_process_model_identifier, message, 200)


def process_model_file_delete(
modified_process_model_identifier: str, file_name: str
) -> flask.wrappers.Response:
def process_model_file_delete(modified_process_model_identifier: str, file_name: str) -> flask.wrappers.Response:
"""Process_model_file_delete."""
process_model_identifier = modified_process_model_identifier.replace(":", "/")
process_model = _get_process_model(process_model_identifier)

@@ -333,8 +279,7 @@ def process_model_file_delete(
) from exception

_commit_and_push_to_git(
f"User: {g.user.username} deleted process model file"
f" {process_model_identifier}/{file_name}"
f"User: {g.user.username} deleted process model file {process_model_identifier}/{file_name}"
)
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")

@@ -344,14 +289,10 @@ def process_model_file_create(
) -> flask.wrappers.Response:
"""Process_model_file_create."""
message = f"User: {g.user.username} added process model file"
return _create_or_update_process_model_file(
modified_process_model_identifier, message, 201
)
return _create_or_update_process_model_file(modified_process_model_identifier, message, 201)


def process_model_file_show(
modified_process_model_identifier: str, file_name: str
) -> Any:
def process_model_file_show(modified_process_model_identifier: str, file_name: str) -> Any:
"""Process_model_file_show."""
process_model_identifier = modified_process_model_identifier.replace(":", "/")
process_model = _get_process_model(process_model_identifier)

@@ -360,8 +301,7 @@ def process_model_file_show(
raise ApiError(
error_code="unknown file",
message=(
f"No information exists for file {file_name}"
f" it does not exist in workflow {process_model_identifier}."
f"No information exists for file {file_name} it does not exist in workflow {process_model_identifier}."
),
status_code=404,
)

@@ -382,17 +322,13 @@ def process_model_create_with_natural_language(
) -> flask.wrappers.Response:
"""Process_model_create_with_natural_language."""
pattern = re.compile(
r"Create a (?P<pm_name>.*?) process model with a (?P<form_name>.*?) form that"
r" collects (?P<columns>.*)"
r"Create a (?P<pm_name>.*?) process model with a (?P<form_name>.*?) form that" r" collects (?P<columns>.*)"
)
match = pattern.match(body["natural_language_text"])
if match is None:
raise ApiError(
error_code="natural_language_text_not_yet_supported",
message=(
"Natural language text is not yet supported. Please use the form:"
f" {pattern.pattern}"
),
message=f"Natural language text is not yet supported. Please use the form: {pattern.pattern}",
status_code=400,
)
process_model_display_name = match.group("pm_name")

@@ -406,12 +342,8 @@ def process_model_create_with_natural_language(
column_names = match.group("columns")
columns = re.sub(r"(, (and )?)", ",", column_names).split(",")

process_group = _get_process_group_from_modified_identifier(
modified_process_group_id
)
qualified_process_model_identifier = (
f"{process_group.id}/{process_model_identifier}"
)
process_group = _get_process_group_from_modified_identifier(modified_process_group_id)
qualified_process_model_identifier = f"{process_group.id}/{process_model_identifier}"

metadata_extraction_paths = []
for column in columns:

@@ -432,9 +364,7 @@ def process_model_create_with_natural_language(
status_code=400,
)

bpmn_template_file = os.path.join(
current_app.root_path, "templates", "basic_with_user_task_template.bpmn"
)
bpmn_template_file = os.path.join(current_app.root_path, "templates", "basic_with_user_task_template.bpmn")
if not os.path.exists(bpmn_template_file):
raise ApiError(
error_code="bpmn_template_file_does_not_exist",

@@ -451,9 +381,7 @@ def process_model_create_with_natural_language(
bpmn_template_contents = bpmn_template_contents.replace(
"natural_language_process_id_template", bpmn_process_identifier
)
bpmn_template_contents = bpmn_template_contents.replace(
"form-identifier-id-template", form_identifier
)
bpmn_template_contents = bpmn_template_contents.replace("form-identifier-id-template", form_identifier)

form_uischema_json: dict = {"ui:order": columns}

@@ -487,21 +415,14 @@ def process_model_create_with_natural_language(
)

_commit_and_push_to_git(
f"User: {g.user.username} created process model via natural language:"
f" {process_model_info.id}"
f"User: {g.user.username} created process model via natural language: {process_model_info.id}"
)

default_report_metadata = ProcessInstanceReportService.system_metadata_map(
"default"
)
default_report_metadata = ProcessInstanceReportService.system_metadata_map("default")
if default_report_metadata is None:
raise ProcessInstanceReportNotFoundError(
"Could not find a report with identifier 'default'"
)
raise ProcessInstanceReportNotFoundError("Could not find a report with identifier 'default'")
for column in columns:
default_report_metadata["columns"].append(
{"Header": column, "accessor": column, "filterable": True}
)
default_report_metadata["columns"].append({"Header": column, "accessor": column, "filterable": True})
ProcessInstanceReportModel.create_report(
identifier=process_model_identifier,
user=g.user,

@@ -534,16 +455,11 @@ def _get_process_group_from_modified_identifier(
if modified_process_group_id is None:
raise ApiError(
error_code="process_group_id_not_specified",
message=(
"Process Model could not be created when process_group_id path param is"
" unspecified"
),
message="Process Model could not be created when process_group_id path param is unspecified",
status_code=400,
)

unmodified_process_group_id = _un_modify_modified_process_model_id(
modified_process_group_id
)
unmodified_process_group_id = _un_modify_modified_process_model_id(modified_process_group_id)
process_group = ProcessModelService.get_process_group(unmodified_process_group_id)
if process_group is None:
raise ApiError(

@@ -584,26 +500,19 @@ def _create_or_update_process_model_file(

file = None
try:
file = SpecFileService.update_file(
process_model, request_file.filename, request_file_contents
)
file = SpecFileService.update_file(process_model, request_file.filename, request_file_contents)
except ProcessModelFileInvalidError as exception:
raise (
ApiError(
error_code="process_model_file_invalid",
message=(
f"Invalid Process model file: {request_file.filename}."
f" Received error: {str(exception)}"
),
message=f"Invalid Process model file: {request_file.filename}. Received error: {str(exception)}",
status_code=400,
)
) from exception
file_contents = SpecFileService.get_data(process_model, file.name)
file.file_contents = file_contents
file.process_model_id = process_model.id
_commit_and_push_to_git(
f"{message_for_git_commit} {process_model_identifier}/{file.name}"
)
_commit_and_push_to_git(f"{message_for_git_commit} {process_model_identifier}/{file.name}")

return Response(
json.dumps(FileSchema().dump(file)),

@@ -26,13 +26,9 @@ def script_unit_test_create(
modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]]
) -> flask.wrappers.Response:
"""Script_unit_test_create."""
bpmn_task_identifier = _get_required_parameter_or_raise(
"bpmn_task_identifier", body
)
bpmn_task_identifier = _get_required_parameter_or_raise("bpmn_task_identifier", body)
input_json = _get_required_parameter_or_raise("input_json", body)
expected_output_json = _get_required_parameter_or_raise(
"expected_output_json", body
)
expected_output_json = _get_required_parameter_or_raise("expected_output_json", body)

process_model_identifier = modified_process_model_identifier.replace(":", "/")
process_model = _get_process_model(process_model_identifier)

@@ -40,10 +36,7 @@ def script_unit_test_create(
if file is None:
raise ApiError(
error_code="cannot_find_file",
message=(
"Could not find the primary bpmn file for process_model:"
f" {process_model.id}"
),
message=f"Could not find the primary bpmn file for process_model: {process_model.id}",
status_code=404,
)

@@ -52,9 +45,7 @@ def script_unit_test_create(
bpmn_etree_element = SpecFileService.get_etree_from_xml_bytes(file_contents)

nsmap = bpmn_etree_element.nsmap
spiff_element_maker = ElementMaker(
namespace="http://spiffworkflow.org/bpmn/schema/1.0/core", nsmap=nsmap
)
spiff_element_maker = ElementMaker(namespace="http://spiffworkflow.org/bpmn/schema/1.0/core", nsmap=nsmap)

script_task_elements = bpmn_etree_element.xpath(
f"//bpmn:scriptTask[@id='{bpmn_task_identifier}']",

@@ -74,9 +65,7 @@ def script_unit_test_create(
namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"},
)
if len(extension_elements_array) == 0:
bpmn_element_maker = ElementMaker(
namespace="http://www.omg.org/spec/BPMN/20100524/MODEL", nsmap=nsmap
)
bpmn_element_maker = ElementMaker(namespace="http://www.omg.org/spec/BPMN/20100524/MODEL", nsmap=nsmap)
extension_elements = bpmn_element_maker("extensionElements")
script_task_element.append(extension_elements)
else:

@@ -93,23 +82,16 @@ def script_unit_test_create(
else:
unit_test_elements = unit_test_elements_array[0]

fuzz = "".join(
random.choice(string.ascii_uppercase + string.digits)  # noqa: S311
for _ in range(7)
)
fuzz = "".join(random.choice(string.ascii_uppercase + string.digits) for _ in range(7))  # noqa: S311
unit_test_id = f"unit_test_{fuzz}"

input_json_element = spiff_element_maker("inputJson", json.dumps(input_json))
expected_output_json_element = spiff_element_maker(
"expectedOutputJson", json.dumps(expected_output_json)
)
expected_output_json_element = spiff_element_maker("expectedOutputJson", json.dumps(expected_output_json))
unit_test_element = spiff_element_maker("unitTest", id=unit_test_id)
unit_test_element.append(input_json_element)
unit_test_element.append(expected_output_json_element)
unit_test_elements.append(unit_test_element)
SpecFileService.update_file(
process_model, file.name, etree.tostring(bpmn_etree_element)
)
SpecFileService.update_file(process_model, file.name, etree.tostring(bpmn_etree_element))

return Response(json.dumps({"ok": True}), status=202, mimetype="application/json")

@@ -124,9 +106,7 @@ def script_unit_test_run(

python_script = _get_required_parameter_or_raise("python_script", body)
input_json = _get_required_parameter_or_raise("input_json", body)
expected_output_json = _get_required_parameter_or_raise(
"expected_output_json", body
)
expected_output_json = _get_required_parameter_or_raise("expected_output_json", body)

result = ScriptUnitTestRunner.run_with_script_and_pre_post_contexts(
python_script, input_json, expected_output_json

@@ -17,9 +17,7 @@ from spiffworkflow_backend.services.service_task_service import ServiceTaskServi
def service_task_list() -> flask.wrappers.Response:
"""Service_task_list."""
available_connectors = ServiceTaskService.available_connectors()
return Response(
json.dumps(available_connectors), status=200, mimetype="application/json"
)
return Response(json.dumps(available_connectors), status=200, mimetype="application/json")


def authentication_list() -> flask.wrappers.Response:

@@ -27,9 +25,7 @@ def authentication_list() -> flask.wrappers.Response:
available_authentications = ServiceTaskService.authentication_list()
response_json = {
"results": available_authentications,
"connector_proxy_base_url": current_app.config[
"SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL"
],
"connector_proxy_base_url": current_app.config["SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL"],
"redirect_url": f"{current_app.config['SPIFFWORKFLOW_BACKEND_URL']}/v1.0/authentication_callback",
}

@@ -43,9 +39,5 @@ def authentication_callback(
"""Authentication_callback."""
verify_token(request.args.get("token"), force_run=True)
response = request.args["response"]
SecretService.update_secret(
f"{service}/{auth_method}", response, g.user.id, create_if_not_exists=True
)
return redirect(
f"{current_app.config['SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND']}/admin/configuration"
)
SecretService.update_secret(f"{service}/{auth_method}", response, g.user.id, create_if_not_exists=True)
return redirect(f"{current_app.config['SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND']}/admin/configuration")

@@ -104,9 +104,7 @@ def task_list_my_tasks(
ProcessInstanceModel.status != ProcessInstanceStatus.error.value,
)

potential_owner_usernames_from_group_concat_or_similar = (
_get_potential_owner_usernames(assigned_user)
)
potential_owner_usernames_from_group_concat_or_similar = _get_potential_owner_usernames(assigned_user)

# FIXME: this breaks postgres. Look at commit c147cdb47b1481f094b8c3d82dc502fe961f4977 for
# the postgres fix but it breaks the method for mysql.

@@ -147,9 +145,7 @@ def task_list_my_tasks(
return make_response(jsonify(response_json), 200)


def task_list_for_my_open_processes(
page: int = 1, per_page: int = 100
) -> flask.wrappers.Response:
def task_list_for_my_open_processes(page: int = 1, per_page: int = 100) -> flask.wrappers.Response:
"""Task_list_for_my_open_processes."""
return _get_tasks(page=page, per_page=per_page)

@@ -194,10 +190,7 @@ def task_data_show(
if step_detail is None:
raise ApiError(
error_code="spiff_step_for_proces_instance_not_found",
message=(
"The given spiff step for the given process instance could not be"
" found."
),
message="The given spiff step for the given process instance could not be found.",
status_code=400,
)

@@ -228,9 +221,7 @@ def _munge_form_ui_schema_based_on_hidden_fields_in_task_data(task: Task) -> Non
for ii, hidden_field_part in enumerate(hidden_field_parts):
if hidden_field_part not in relevant_depth_of_ui_schema:
relevant_depth_of_ui_schema[hidden_field_part] = {}
relevant_depth_of_ui_schema = relevant_depth_of_ui_schema[
hidden_field_part
]
relevant_depth_of_ui_schema = relevant_depth_of_ui_schema[hidden_field_part]
if len(hidden_field_parts) == ii + 1:
relevant_depth_of_ui_schema["ui:widget"] = "hidden"

@@ -255,9 +246,7 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response
form_schema_file_name = ""
form_ui_schema_file_name = ""
processor = ProcessInstanceProcessor(process_instance)
spiff_task = _get_spiff_task_from_process_instance(
task_id, process_instance, processor=processor
)
spiff_task = _get_spiff_task_from_process_instance(task_id, process_instance, processor=processor)
extensions = spiff_task.task_spec.extensions

if "properties" in extensions:

@@ -276,23 +265,13 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response
refs = SpecFileService.get_references_for_process(process_model_with_form)
all_processes = [i.identifier for i in refs]
if task.process_identifier not in all_processes:
top_process_name = processor.find_process_model_process_name_by_task_name(
task.process_identifier
)
bpmn_file_full_path = (
ProcessInstanceProcessor.bpmn_file_full_path_from_bpmn_process_identifier(
top_process_name
)
)
relative_path = os.path.relpath(
bpmn_file_full_path, start=FileSystemService.root_path()
top_process_name = processor.find_process_model_process_name_by_task_name(task.process_identifier)
bpmn_file_full_path = ProcessInstanceProcessor.bpmn_file_full_path_from_bpmn_process_identifier(
top_process_name
)
relative_path = os.path.relpath(bpmn_file_full_path, start=FileSystemService.root_path())
process_model_relative_path = os.path.dirname(relative_path)
process_model_with_form = (
ProcessModelService.get_process_model_from_relative_path(
process_model_relative_path
)
)
process_model_with_form = ProcessModelService.get_process_model_from_relative_path(process_model_relative_path)

if task.type == "User Task":
if not form_schema_file_name:

@@ -300,8 +279,7 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response
ApiError(
error_code="missing_form_file",
message=(
"Cannot find a form file for process_instance_id:"
f" {process_instance_id}, task_id: {task_id}"
f"Cannot find a form file for process_instance_id: {process_instance_id}, task_id: {task_id}"
),
status_code=400,
)

@@ -338,9 +316,7 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response
)
except WorkflowTaskException as wfe:
wfe.add_note("Failed to render instructions for end user.")
raise ApiError.from_workflow_exception(
"instructions_error", str(wfe), exp=wfe
) from wfe
raise ApiError.from_workflow_exception("instructions_error", str(wfe), exp=wfe) from wfe
return make_response(jsonify(task), 200)

@@ -387,12 +363,8 @@ def task_submit_shared(
)

processor = ProcessInstanceProcessor(process_instance)
spiff_task = _get_spiff_task_from_process_instance(
task_id, process_instance, processor=processor
)
AuthorizationService.assert_user_can_complete_spiff_task(
process_instance.id, spiff_task, principal.user
)
spiff_task = _get_spiff_task_from_process_instance(task_id, process_instance, processor=processor)
AuthorizationService.assert_user_can_complete_spiff_task(process_instance.id, spiff_task, principal.user)

if spiff_task.state != TaskState.READY:
raise (

@@ -434,18 +406,14 @@ def task_submit_shared(
# next_task = processor.next_task()

next_human_task_assigned_to_me = (
HumanTaskModel.query.filter_by(
process_instance_id=process_instance_id, completed=False
)
HumanTaskModel.query.filter_by(process_instance_id=process_instance_id, completed=False)
.order_by(asc(HumanTaskModel.id))  # type: ignore
.join(HumanTaskUserModel)
.filter_by(user_id=principal.user_id)
.first()
)
if next_human_task_assigned_to_me:
return make_response(
jsonify(HumanTaskModel.to_task(next_human_task_assigned_to_me)), 200
)
return make_response(jsonify(HumanTaskModel.to_task(next_human_task_assigned_to_me)), 200)

return Response(json.dumps({"ok": True}), status=202, mimetype="application/json")

@@ -457,9 +425,7 @@ def task_submit(
terminate_loop: bool = False,
) -> flask.wrappers.Response:
"""Task_submit_user_data."""
with sentry_sdk.start_span(
op="controller_action", description="tasks_controller.task_submit"
):
with sentry_sdk.start_span(op="controller_action", description="tasks_controller.task_submit"):
return task_submit_shared(process_instance_id, task_id, body, terminate_loop)

@@ -492,9 +458,7 @@ def _get_tasks(
assigned_user = aliased(UserModel)
if processes_started_by_user:
human_tasks_query = (
human_tasks_query.filter(
ProcessInstanceModel.process_initiator_id == user_id
)
human_tasks_query.filter(ProcessInstanceModel.process_initiator_id == user_id)
.outerjoin(
HumanTaskUserModel,
HumanTaskModel.id == HumanTaskUserModel.human_task_id,

@@ -502,9 +466,7 @@ def _get_tasks(
.outerjoin(assigned_user, assigned_user.id == HumanTaskUserModel.user_id)
)
else:
human_tasks_query = human_tasks_query.filter(
ProcessInstanceModel.process_initiator_id != user_id
).join(
human_tasks_query = human_tasks_query.filter(ProcessInstanceModel.process_initiator_id != user_id).join(
HumanTaskUserModel,
and_(
HumanTaskUserModel.user_id == user_id,

@@ -514,9 +476,7 @@ def _get_tasks(

if has_lane_assignment_id:
if user_group_identifier:
human_tasks_query = human_tasks_query.filter(
GroupModel.identifier == user_group_identifier
)
human_tasks_query = human_tasks_query.filter(GroupModel.identifier == user_group_identifier)
else:
human_tasks_query = human_tasks_query.filter(
HumanTaskModel.lane_assignment_id.is_not(None)  # type: ignore

@@ -524,9 +484,7 @@ def _get_tasks(
else:
human_tasks_query = human_tasks_query.filter(HumanTaskModel.lane_assignment_id.is_(None))  # type: ignore

potential_owner_usernames_from_group_concat_or_similar = (
_get_potential_owner_usernames(assigned_user)
)
potential_owner_usernames_from_group_concat_or_similar = _get_potential_owner_usernames(assigned_user)

human_tasks = (
human_tasks_query.add_columns(

@ -558,9 +516,7 @@ def _get_tasks(
|
|||
return make_response(jsonify(response_json), 200)
|
||||
|
||||
|
||||
def _prepare_form_data(
|
||||
form_file: str, spiff_task: SpiffTask, process_model: ProcessModelInfo
|
||||
) -> dict:
|
||||
def _prepare_form_data(form_file: str, spiff_task: SpiffTask, process_model: ProcessModelInfo) -> dict:
|
||||
"""Prepare_form_data."""
|
||||
if spiff_task.data is None:
|
||||
return {}
|
||||
|
@ -576,42 +532,29 @@ def _prepare_form_data(
|
|||
raise (
|
||||
ApiError(
|
||||
error_code="error_loading_form",
|
||||
message=(
|
||||
f"Could not load form schema from: {form_file}."
|
||||
f" Error was: {str(exception)}"
|
||||
),
|
||||
message=f"Could not load form schema from: {form_file}. Error was: {str(exception)}",
|
||||
status_code=400,
|
||||
)
|
||||
) from exception
|
||||
except WorkflowTaskException as wfe:
|
||||
wfe.add_note(f"Error in Json Form File '{form_file}'")
|
||||
api_error = ApiError.from_workflow_exception(
|
||||
"instructions_error", str(wfe), exp=wfe
|
||||
)
|
||||
api_error = ApiError.from_workflow_exception("instructions_error", str(wfe), exp=wfe)
|
||||
api_error.file_name = form_file
|
||||
raise api_error


def _render_jinja_template(unprocessed_template: str, spiff_task: SpiffTask) -> str:
"""Render_jinja_template."""
jinja_environment = jinja2.Environment(
autoescape=True, lstrip_blocks=True, trim_blocks=True
)
jinja_environment = jinja2.Environment(autoescape=True, lstrip_blocks=True, trim_blocks=True)
try:
template = jinja_environment.from_string(unprocessed_template)
return template.render(**spiff_task.data)
except jinja2.exceptions.TemplateError as template_error:
wfe = WorkflowTaskException(
str(template_error), task=spiff_task, exception=template_error
)
wfe = WorkflowTaskException(str(template_error), task=spiff_task, exception=template_error)
if isinstance(template_error, TemplateSyntaxError):
wfe.line_number = template_error.lineno
wfe.error_line = template_error.source.split("\n")[
template_error.lineno - 1
]
wfe.add_note(
"Jinja2 template errors can happen when trying to display task data"
)
wfe.error_line = template_error.source.split("\n")[template_error.lineno - 1]
wfe.add_note("Jinja2 template errors can happen when trying to display task data")
raise wfe from template_error
except Exception as error:
_type, _value, tb = exc_info()

@@ -621,9 +564,7 @@ def _render_jinja_template(unprocessed_template: str, spiff_task: SpiffTask) ->
wfe.line_number = tb.tb_lineno
wfe.error_line = unprocessed_template.split("\n")[tb.tb_lineno - 1]
tb = tb.tb_next
wfe.add_note(
"Jinja2 template errors can happen when trying to display task data"
)
wfe.add_note("Jinja2 template errors can happen when trying to display task data")
raise wfe from error

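For reference, a minimal standalone sketch (editor's illustration, not part of this commit) of the render-then-report pattern that _render_jinja_template uses above; the template string and task data are invented:

import jinja2
from jinja2 import TemplateSyntaxError

def render_task_template(unprocessed_template: str, task_data: dict) -> str:
    # mirror the autoescape/trim settings used in the controller above
    environment = jinja2.Environment(autoescape=True, lstrip_blocks=True, trim_blocks=True)
    try:
        return environment.from_string(unprocessed_template).render(**task_data)
    except TemplateSyntaxError as error:
        offending_line = ""
        if error.source is not None and error.lineno:
            # lineno is 1-based, so subtract one to pull the offending template line
            offending_line = error.source.split("\n")[error.lineno - 1]
        raise ValueError(f"template error on line {error.lineno}: {offending_line}") from error

print(render_task_template("Hello {{ name }}", {"name": "World"}))  # -> Hello World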
@@ -650,9 +591,7 @@ def _get_spiff_task_from_process_instance(
|
|||
|
||||
|
||||
# originally from: https://bitcoden.com/answers/python-nested-dictionary-update-value-where-any-nested-key-matches
|
||||
def _update_form_schema_with_task_data_as_needed(
|
||||
in_dict: dict, task: Task, spiff_task: SpiffTask
|
||||
) -> None:
|
||||
def _update_form_schema_with_task_data_as_needed(in_dict: dict, task: Task, spiff_task: SpiffTask) -> None:
|
||||
"""Update_nested."""
|
||||
if task.data is None:
|
||||
return None
|
||||
|
@@ -664,12 +603,8 @@ def _update_form_schema_with_task_data_as_needed(
if len(value) == 1:
first_element_in_value_list = value[0]
if isinstance(first_element_in_value_list, str):
if first_element_in_value_list.startswith(
"options_from_task_data_var:"
):
task_data_var = first_element_in_value_list.replace(
"options_from_task_data_var:", ""
)
if first_element_in_value_list.startswith("options_from_task_data_var:"):
task_data_var = first_element_in_value_list.replace("options_from_task_data_var:", "")

if task_data_var not in task.data:
wte = WorkflowTaskException(

@@ -691,10 +626,7 @@ def _update_form_schema_with_task_data_as_needed(

select_options_from_task_data = task.data.get(task_data_var)
if isinstance(select_options_from_task_data, list):
if all(
"value" in d and "label" in d
for d in select_options_from_task_data
):
if all("value" in d and "label" in d for d in select_options_from_task_data):

def map_function(
task_data_select_option: TaskDataSelectOption,
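A self-contained sketch of the substitution the function above performs when a schema enum holds an options_from_task_data_var: placeholder; the schema and task data below are made up for illustration:

# Rough illustration (invented schema and task data, not from this repo) of swapping an
# "options_from_task_data_var:" placeholder for label/value pairs held in task data.
form_schema = {"enum": ["options_from_task_data_var:fruits"]}
task_data = {"fruits": [{"label": "Apple", "value": "apple"}, {"label": "Pear", "value": "pear"}]}

placeholder = form_schema["enum"][0]
if placeholder.startswith("options_from_task_data_var:"):
    task_data_var = placeholder.replace("options_from_task_data_var:", "")
    options = task_data.get(task_data_var, [])
    if all("value" in d and "label" in d for d in options):
        # the real code maps these onto the schema with a map_function; here we just swap the enum values
        form_schema["enum"] = [d["value"] for d in options]

print(form_schema)  # {'enum': ['apple', 'pear']}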
@@ -744,9 +676,7 @@ def _find_human_task_or_raise(
|
|||
process_instance_id=process_instance_id, task_id=task_id, completed=False
|
||||
)
|
||||
else:
|
||||
human_task_query = HumanTaskModel.query.filter_by(
|
||||
process_instance_id=process_instance_id, task_id=task_id
|
||||
)
|
||||
human_task_query = HumanTaskModel.query.filter_by(process_instance_id=process_instance_id, task_id=task_id)
|
||||
|
||||
human_task: HumanTaskModel = human_task_query.first()
|
||||
if human_task is None:
@@ -80,8 +80,7 @@ def verify_token(
|
|||
user_model = get_user_from_decoded_internal_token(decoded_token)
|
||||
except Exception as e:
|
||||
current_app.logger.error(
|
||||
"Exception in verify_token getting user from decoded"
|
||||
f" internal token. {e}"
|
||||
f"Exception in verify_token getting user from decoded internal token. {e}"
|
||||
)
|
||||
elif "iss" in decoded_token.keys():
|
||||
user_info = None
|
||||
|
@ -90,22 +89,12 @@ def verify_token(
|
|||
user_info = decoded_token
|
||||
except TokenExpiredError as token_expired_error:
|
||||
# Try to refresh the token
|
||||
user = UserService.get_user_by_service_and_service_id(
|
||||
decoded_token["iss"], decoded_token["sub"]
|
||||
)
|
||||
user = UserService.get_user_by_service_and_service_id(decoded_token["iss"], decoded_token["sub"])
|
||||
if user:
|
||||
refresh_token = AuthenticationService.get_refresh_token(user.id)
|
||||
if refresh_token:
|
||||
auth_token: dict = (
|
||||
AuthenticationService.get_auth_token_from_refresh_token(
|
||||
refresh_token
|
||||
)
|
||||
)
|
||||
if (
|
||||
auth_token
|
||||
and "error" not in auth_token
|
||||
and "id_token" in auth_token
|
||||
):
|
||||
auth_token: dict = AuthenticationService.get_auth_token_from_refresh_token(refresh_token)
|
||||
if auth_token and "error" not in auth_token and "id_token" in auth_token:
|
||||
tld = current_app.config["THREAD_LOCAL_DATA"]
|
||||
tld.new_access_token = auth_token["id_token"]
|
||||
tld.new_id_token = auth_token["id_token"]
|
||||
|
@ -130,9 +119,7 @@ def verify_token(
|
|||
status_code=401,
|
||||
) from e
|
||||
if (
|
||||
user_info is not None
|
||||
and "error" not in user_info
|
||||
and "iss" in user_info
|
||||
user_info is not None and "error" not in user_info and "iss" in user_info
|
||||
): # not sure what to test yet
|
||||
user_model = (
|
||||
UserModel.query.filter(UserModel.service == user_info["iss"])
|
||||
|
@ -154,9 +141,7 @@ def verify_token(
|
|||
)
|
||||
|
||||
else:
|
||||
current_app.logger.debug(
|
||||
"token_type not in decode_token in verify_token"
|
||||
)
|
||||
current_app.logger.debug("token_type not in decode_token in verify_token")
|
||||
raise ApiError(
|
||||
error_code="invalid_token",
|
||||
message="Invalid token. Please log in.",
|
||||
|
@ -175,9 +160,7 @@ def verify_token(
|
|||
else:
|
||||
raise ApiError(error_code="no_user_id", message="Cannot get a user id")
|
||||
|
||||
raise ApiError(
|
||||
error_code="invalid_token", message="Cannot validate token.", status_code=401
|
||||
)
|
||||
raise ApiError(error_code="invalid_token", message="Cannot validate token.", status_code=401)
|
||||
|
||||
|
||||
def set_new_access_token_in_cookie(
|
||||
|
@ -193,30 +176,20 @@ def set_new_access_token_in_cookie(
|
|||
"",
|
||||
current_app.config["SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND"],
|
||||
)
|
||||
if domain_for_frontend_cookie and domain_for_frontend_cookie.startswith(
|
||||
"localhost"
|
||||
):
|
||||
if domain_for_frontend_cookie and domain_for_frontend_cookie.startswith("localhost"):
|
||||
domain_for_frontend_cookie = None
|
||||
|
||||
# fixme - we should not be passing the access token back to the client
|
||||
if hasattr(tld, "new_access_token") and tld.new_access_token:
|
||||
response.set_cookie(
|
||||
"access_token", tld.new_access_token, domain=domain_for_frontend_cookie
|
||||
)
|
||||
response.set_cookie("access_token", tld.new_access_token, domain=domain_for_frontend_cookie)
|
||||
|
||||
# id_token is required for logging out since this gets passed back to the openid server
|
||||
if hasattr(tld, "new_id_token") and tld.new_id_token:
|
||||
response.set_cookie(
|
||||
"id_token", tld.new_id_token, domain=domain_for_frontend_cookie
|
||||
)
|
||||
response.set_cookie("id_token", tld.new_id_token, domain=domain_for_frontend_cookie)
|
||||
|
||||
if hasattr(tld, "user_has_logged_out") and tld.user_has_logged_out:
|
||||
response.set_cookie(
|
||||
"id_token", "", max_age=0, domain=domain_for_frontend_cookie
|
||||
)
|
||||
response.set_cookie(
|
||||
"access_token", "", max_age=0, domain=domain_for_frontend_cookie
|
||||
)
|
||||
response.set_cookie("id_token", "", max_age=0, domain=domain_for_frontend_cookie)
|
||||
response.set_cookie("access_token", "", max_age=0, domain=domain_for_frontend_cookie)
|
||||
|
||||
_clear_auth_tokens_from_thread_local_data()
|
||||
|
||||
|
@@ -236,9 +209,7 @@ def encode_auth_token(sub: str, token_type: Optional[str] = None) -> str:
secret_key = current_app.config.get("SECRET_KEY")
else:
current_app.logger.error("Missing SECRET_KEY in encode_auth_token")
raise ApiError(
error_code="encode_error", message="Missing SECRET_KEY in encode_auth_token"
)
raise ApiError(error_code="encode_error", message="Missing SECRET_KEY in encode_auth_token")
return jwt.encode(
payload,
str(secret_key),
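The hunk ends before the jwt.encode algorithm argument, so the HS256 choice below is an assumption; a minimal PyJWT round trip for orientation, with an invented secret and subject:

import jwt  # PyJWT

secret_key = "example-secret"  # stand-in for current_app.config["SECRET_KEY"]
payload = {"sub": "service:internal::service_id:some-user", "token_type": "internal"}

# algorithm is an assumption here; the diff is cut off before that argument
token = jwt.encode(payload, secret_key, algorithm="HS256")
claims = jwt.decode(token, secret_key, algorithms=["HS256"])
assert claims["token_type"] == "internal"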
@@ -249,9 +220,7 @@ def encode_auth_token(sub: str, token_type: Optional[str] = None) -> str:
|
|||
def login(redirect_url: str = "/") -> Response:
|
||||
"""Login."""
|
||||
state = AuthenticationService.generate_state(redirect_url)
|
||||
login_redirect_url = AuthenticationService().get_login_redirect_url(
|
||||
state.decode("UTF-8")
|
||||
)
|
||||
login_redirect_url = AuthenticationService().get_login_redirect_url(state.decode("UTF-8"))
|
||||
return redirect(login_redirect_url)
|
||||
|
||||
|
||||
|
@ -281,9 +250,7 @@ def login_return(code: str, state: str, session_state: str = "") -> Optional[Res
|
|||
g.user = user_model.id
|
||||
g.token = auth_token_object["id_token"]
|
||||
if "refresh_token" in auth_token_object:
|
||||
AuthenticationService.store_refresh_token(
|
||||
user_model.id, auth_token_object["refresh_token"]
|
||||
)
|
||||
AuthenticationService.store_refresh_token(user_model.id, auth_token_object["refresh_token"])
|
||||
redirect_url = state_redirect_url
|
||||
tld = current_app.config["THREAD_LOCAL_DATA"]
|
||||
tld.new_access_token = auth_token_object["id_token"]
|
||||
|
@ -325,9 +292,7 @@ def login_api() -> Response:
|
|||
"""Login_api."""
|
||||
redirect_url = "/v1.0/login_api_return"
|
||||
state = AuthenticationService.generate_state(redirect_url)
|
||||
login_redirect_url = AuthenticationService().get_login_redirect_url(
|
||||
state.decode("UTF-8"), redirect_url
|
||||
)
|
||||
login_redirect_url = AuthenticationService().get_login_redirect_url(state.decode("UTF-8"), redirect_url)
|
||||
return redirect(login_redirect_url)
|
||||
|
||||
|
||||
|
@ -335,9 +300,7 @@ def login_api_return(code: str, state: str, session_state: str) -> str:
|
|||
state_dict = ast.literal_eval(base64.b64decode(state).decode("utf-8"))
|
||||
state_dict["redirect_url"]
|
||||
|
||||
auth_token_object = AuthenticationService().get_auth_token_object(
|
||||
code, "/v1.0/login_api_return"
|
||||
)
|
||||
auth_token_object = AuthenticationService().get_auth_token_object(code, "/v1.0/login_api_return")
|
||||
access_token: str = auth_token_object["access_token"]
|
||||
if access_token is None:
|
||||
raise MissingAccessTokenError("Cannot find the access token for the request")
|
||||
|
@@ -365,16 +328,12 @@ def get_decoded_token(token: str) -> Optional[Dict]:
try:
decoded_token = jwt.decode(token, options={"verify_signature": False})
except Exception as e:
raise ApiError(
error_code="invalid_token", message="Cannot decode token."
) from e
raise ApiError(error_code="invalid_token", message="Cannot decode token.") from e
else:
if "token_type" in decoded_token or "iss" in decoded_token:
return decoded_token
else:
current_app.logger.error(
f"Unknown token type in get_decoded_token: token: {token}"
)
current_app.logger.error(f"Unknown token type in get_decoded_token: token: {token}")
raise ApiError(
error_code="unknown_token",
message="Unknown token type in get_decoded_token",
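A small sketch of the claim inspection get_decoded_token does above: decode without verifying the signature, then branch on the claims. The sample token is generated locally for the example:

import jwt

token = jwt.encode({"iss": "https://openid.example.com/realms/spiff", "sub": "abc"}, "k", algorithm="HS256")

# Signature verification is intentionally skipped; the claims are only used to decide
# which verification path to take afterwards, as in get_decoded_token above.
decoded = jwt.decode(token, options={"verify_signature": False})
if "token_type" in decoded or "iss" in decoded:
    print("recognized token shape, issuer:", decoded["iss"])
else:
    raise ValueError("Unknown token type")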
@@ -397,9 +356,7 @@ def get_user_from_decoded_internal_token(decoded_token: dict) -> Optional[UserMo
|
|||
service = parts[0].split(":")[1]
|
||||
service_id = parts[1].split(":")[1]
|
||||
user: UserModel = (
|
||||
UserModel.query.filter(UserModel.service == service)
|
||||
.filter(UserModel.service_id == service_id)
|
||||
.first()
|
||||
UserModel.query.filter(UserModel.service == service).filter(UserModel.service_id == service_id).first()
|
||||
)
|
||||
if user:
|
||||
return user
@@ -98,11 +98,7 @@ def create_group(group_name: str) -> flask.wrappers.Response:
|
|||
try:
|
||||
db.session.add(group)
|
||||
except IntegrityError as exception:
|
||||
raise (
|
||||
ApiError(
|
||||
error_code="integrity_error", message=repr(exception), status_code=500
|
||||
)
|
||||
) from exception
|
||||
raise (ApiError(error_code="integrity_error", message=repr(exception), status_code=500)) from exception
|
||||
db.session.commit()
|
||||
|
||||
return Response(json.dumps({"id": group.id}), status=201, mimetype=APPLICATION_JSON)
|
||||
|
@ -133,9 +129,7 @@ def assign_user_to_group() -> flask.wrappers.Response:
|
|||
user = get_user_from_request()
|
||||
group = get_group_from_request()
|
||||
|
||||
user_group_assignment = UserGroupAssignmentModel.query.filter_by(
|
||||
user_id=user.id, group_id=group.id
|
||||
).first()
|
||||
user_group_assignment = UserGroupAssignmentModel.query.filter_by(user_id=user.id, group_id=group.id).first()
|
||||
if user_group_assignment is not None:
|
||||
raise (
|
||||
ApiError(
|
||||
|
@ -162,9 +156,7 @@ def remove_user_from_group() -> flask.wrappers.Response:
|
|||
user = get_user_from_request()
|
||||
group = get_group_from_request()
|
||||
|
||||
user_group_assignment = UserGroupAssignmentModel.query.filter_by(
|
||||
user_id=user.id, group_id=group.id
|
||||
).first()
|
||||
user_group_assignment = UserGroupAssignmentModel.query.filter_by(user_id=user.id, group_id=group.id).first()
|
||||
if user_group_assignment is None:
|
||||
raise (
|
||||
ApiError(
|
||||
|
|
|
@ -24,9 +24,6 @@ def user_group_list_for_current_user() -> flask.wrappers.Response:
|
|||
groups = g.user.groups
|
||||
# TODO: filter out the default group and have a way to know what is the default group
|
||||
group_identifiers = [
|
||||
i.identifier
|
||||
for i in groups
|
||||
if i.identifier
|
||||
!= current_app.config["SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP"]
|
||||
i.identifier for i in groups if i.identifier != current_app.config["SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP"]
|
||||
]
|
||||
return make_response(jsonify(sorted(group_identifiers)), 200)
|
||||
|
|
|
@ -36,15 +36,10 @@ class DeleteProcessInstancesWithCriteria(Script):
|
|||
delete_criteria.append(
|
||||
(ProcessInstanceModel.process_model_identifier == criteria["name"])
|
||||
& ProcessInstanceModel.status.in_(criteria["status"]) # type: ignore
|
||||
& (
|
||||
ProcessInstanceModel.updated_at_in_seconds
|
||||
< (delete_time - criteria["last_updated_delta"])
|
||||
)
|
||||
& (ProcessInstanceModel.updated_at_in_seconds < (delete_time - criteria["last_updated_delta"]))
|
||||
)
|
||||
|
||||
results = (
|
||||
ProcessInstanceModel.query.filter(or_(*delete_criteria)).limit(100).all()
|
||||
)
|
||||
results = ProcessInstanceModel.query.filter(or_(*delete_criteria)).limit(100).all()
|
||||
rows_affected = len(results)
|
||||
|
||||
if rows_affected > 0:
|
||||
|
|
|
@ -20,12 +20,7 @@ class FactService(Script):
|
|||
return """Just your basic class that can pull in data from a few api endpoints and
|
||||
do a basic task."""
|
||||
|
||||
def run(
|
||||
self,
|
||||
script_attributes_context: ScriptAttributesContext,
|
||||
*args: Any,
|
||||
**kwargs: Any
|
||||
) -> Any:
|
||||
def run(self, script_attributes_context: ScriptAttributesContext, *args: Any, **kwargs: Any) -> Any:
|
||||
"""Run."""
|
||||
if "type" not in kwargs:
|
||||
raise Exception("Please specify a 'type' of fact as a keyword argument.")
|
||||
|
@ -35,10 +30,7 @@ class FactService(Script):
|
|||
if fact == "cat":
|
||||
details = "The cat in the hat" # self.get_cat()
|
||||
elif fact == "norris":
|
||||
details = (
|
||||
"Chuck Norris doesn’t read books. He stares them down until he gets the"
|
||||
" information he wants."
|
||||
)
|
||||
details = "Chuck Norris doesn’t read books. He stares them down until he gets the information he wants."
|
||||
elif fact == "buzzword":
|
||||
details = "Move the Needle." # self.get_buzzword()
|
||||
else:
|
||||
|
|
|
@ -34,8 +34,7 @@ class GetAllPermissions(Script):
|
|||
.join(GroupModel, GroupModel.id == PrincipalModel.group_id)
|
||||
.join(
|
||||
PermissionTargetModel,
|
||||
PermissionTargetModel.id
|
||||
== PermissionAssignmentModel.permission_target_id,
|
||||
PermissionTargetModel.id == PermissionAssignmentModel.permission_target_id,
|
||||
)
|
||||
.add_columns(
|
||||
PermissionAssignmentModel.permission,
|
||||
|
@ -46,9 +45,7 @@ class GetAllPermissions(Script):
|
|||
|
||||
permissions: OrderedDict[tuple[str, str], list[str]] = OrderedDict()
|
||||
for pa in permission_assignments:
|
||||
permissions.setdefault((pa.group_identifier, pa.uri), []).append(
|
||||
pa.permission
|
||||
)
|
||||
permissions.setdefault((pa.group_identifier, pa.uri), []).append(pa.permission)
|
||||
|
||||
def replace_suffix(string: str, old: str, new: str) -> str:
|
||||
"""Replace_suffix."""
|
||||
|
|
|
@ -20,12 +20,7 @@ class GetCurrentUser(Script):
|
|||
"""Get_description."""
|
||||
return """Return the current user."""
|
||||
|
||||
def run(
|
||||
self,
|
||||
script_attributes_context: ScriptAttributesContext,
|
||||
*_args: Any,
|
||||
**kwargs: Any
|
||||
) -> Any:
|
||||
def run(self, script_attributes_context: ScriptAttributesContext, *_args: Any, **kwargs: Any) -> Any:
|
||||
"""Run."""
|
||||
# dump the user using our json encoder and then load it back up as a dict
|
||||
# to remove unwanted field types
|
||||
|
|
|
@ -27,12 +27,7 @@ class GetDataSizes(Script):
|
|||
return """Returns a dictionary of information about the size of task data and
|
||||
the python environment for the currently running process."""
|
||||
|
||||
def run(
|
||||
self,
|
||||
script_attributes_context: ScriptAttributesContext,
|
||||
*_args: Any,
|
||||
**kwargs: Any
|
||||
) -> Any:
|
||||
def run(self, script_attributes_context: ScriptAttributesContext, *_args: Any, **kwargs: Any) -> Any:
|
||||
"""Run."""
|
||||
if script_attributes_context.task is None:
|
||||
raise TaskNotGivenToScriptError(
|
||||
|
@ -42,8 +37,7 @@ class GetDataSizes(Script):
|
|||
workflow = script_attributes_context.task.workflow
|
||||
task_data_size = ProcessInstanceProcessor.get_task_data_size(workflow)
|
||||
task_data_keys_by_task = {
|
||||
t.task_spec.name: sorted(t.data.keys())
|
||||
for t in ProcessInstanceProcessor.get_tasks_with_data(workflow)
|
||||
t.task_spec.name: sorted(t.data.keys()) for t in ProcessInstanceProcessor.get_tasks_with_data(workflow)
|
||||
}
|
||||
python_env_size = ProcessInstanceProcessor.get_python_env_size(workflow)
|
||||
python_env_keys = workflow.script_engine.environment.user_defined_state().keys()
|
||||
|
|
|
@@ -42,8 +42,6 @@ class GetEncodedFileData(Script):
).first()

base64_value = base64.b64encode(file_data.contents).decode("ascii")
encoded_file_data = (
f"data:{file_data.mimetype};name={file_data.filename};base64,{base64_value}"
)
encoded_file_data = f"data:{file_data.mimetype};name={file_data.filename};base64,{base64_value}"

return encoded_file_data
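The same data-URI construction as above, reduced to a runnable snippet with made-up file contents:

import base64

# stand-ins for file_data.contents / mimetype / filename in the script above
contents = b"hello, world"
mimetype = "text/plain"
filename = "hello.txt"

base64_value = base64.b64encode(contents).decode("ascii")
encoded_file_data = f"data:{mimetype};name={filename};base64,{base64_value}"
print(encoded_file_data)  # data:text/plain;name=hello.txt;base64,aGVsbG8sIHdvcmxk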
|
||||
|
|
|
@ -19,11 +19,6 @@ class GetEnv(Script):
|
|||
"""Get_description."""
|
||||
return """Returns the current environment - ie testing, staging, production."""
|
||||
|
||||
def run(
|
||||
self,
|
||||
script_attributes_context: ScriptAttributesContext,
|
||||
*_args: Any,
|
||||
**kwargs: Any
|
||||
) -> Any:
|
||||
def run(self, script_attributes_context: ScriptAttributesContext, *_args: Any, **kwargs: Any) -> Any:
|
||||
"""Run."""
|
||||
return script_attributes_context.environment_identifier
|
||||
|
|
|
@ -21,11 +21,6 @@ class GetFrontendUrl(Script):
|
|||
"""Get_description."""
|
||||
return """Return the url to the frontend."""
|
||||
|
||||
def run(
|
||||
self,
|
||||
script_attributes_context: ScriptAttributesContext,
|
||||
*args: Any,
|
||||
**kwargs: Any
|
||||
) -> Any:
|
||||
def run(self, script_attributes_context: ScriptAttributesContext, *args: Any, **kwargs: Any) -> Any:
|
||||
"""Run."""
|
||||
return current_app.config["SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND"]
|
||||
|
|
|
@ -32,8 +32,7 @@ class GetGroupMembers(Script):
|
|||
group = GroupModel.query.filter_by(identifier=group_identifier).first()
|
||||
if group is None:
|
||||
raise GroupNotFoundError(
|
||||
"Script 'get_group_members' could not find group with identifier"
|
||||
f" '{group_identifier}'."
|
||||
f"Script 'get_group_members' could not find group with identifier '{group_identifier}'."
|
||||
)
|
||||
|
||||
usernames = [u.username for u in group.users]
|
||||
|
|
|
@ -24,12 +24,7 @@ class GetLocaltime(Script):
|
|||
return """Converts a Datetime object into a Datetime object for a specific timezone.
|
||||
Defaults to US/Eastern."""
|
||||
|
||||
def run(
|
||||
self,
|
||||
script_attributes_context: ScriptAttributesContext,
|
||||
*args: Any,
|
||||
**kwargs: Any
|
||||
) -> datetime:
|
||||
def run(self, script_attributes_context: ScriptAttributesContext, *args: Any, **kwargs: Any) -> datetime:
|
||||
"""Run."""
|
||||
if len(args) > 0 or "datetime" in kwargs:
|
||||
if "datetime" in kwargs:
|
||||
|
|
|
@ -19,16 +19,9 @@ class GetProcessInfo(Script):
|
|||
"""Get_description."""
|
||||
return """Returns a dictionary of information about the currently running process."""
|
||||
|
||||
def run(
|
||||
self,
|
||||
script_attributes_context: ScriptAttributesContext,
|
||||
*_args: Any,
|
||||
**kwargs: Any
|
||||
) -> Any:
|
||||
def run(self, script_attributes_context: ScriptAttributesContext, *_args: Any, **kwargs: Any) -> Any:
|
||||
"""Run."""
|
||||
return {
|
||||
"process_instance_id": script_attributes_context.process_instance_id,
|
||||
"process_model_identifier": (
|
||||
script_attributes_context.process_model_identifier
|
||||
),
|
||||
"process_model_identifier": script_attributes_context.process_model_identifier,
|
||||
}
|
||||
|
|
|
@ -26,9 +26,7 @@ class GetProcessInitiatorUser(Script):
|
|||
) -> Any:
|
||||
"""Run."""
|
||||
process_instance = (
|
||||
ProcessInstanceModel.query.filter_by(
|
||||
id=script_attributes_context.process_instance_id
|
||||
)
|
||||
ProcessInstanceModel.query.filter_by(id=script_attributes_context.process_instance_id)
|
||||
.join(UserModel, UserModel.id == ProcessInstanceModel.process_initiator_id)
|
||||
.first()
|
||||
)
|
||||
|
|
|
@ -15,11 +15,6 @@ class GetSecret(Script):
|
|||
"""Get_description."""
|
||||
return """Returns the value for a previously configured secret."""
|
||||
|
||||
def run(
|
||||
self,
|
||||
script_attributes_context: ScriptAttributesContext,
|
||||
*args: Any,
|
||||
**kwargs: Any
|
||||
) -> Any:
|
||||
def run(self, script_attributes_context: ScriptAttributesContext, *args: Any, **kwargs: Any) -> Any:
|
||||
"""Run."""
|
||||
return SecretService.get_secret(args[0]).value
|
||||
|
|
|
@ -37,19 +37,13 @@ class GetMarkdownFileDownloadLink(Script):
|
|||
label = parts[1].split("=")[1]
|
||||
process_model_identifier = script_attributes_context.process_model_identifier
|
||||
if process_model_identifier is None:
|
||||
raise self.get_proces_model_identifier_is_missing_error(
|
||||
"markdown_file_download_link"
|
||||
)
|
||||
modified_process_model_identifier = (
|
||||
ProcessModelInfo.modify_process_identifier_for_path_param(
|
||||
process_model_identifier
|
||||
)
|
||||
raise self.get_proces_model_identifier_is_missing_error("markdown_file_download_link")
|
||||
modified_process_model_identifier = ProcessModelInfo.modify_process_identifier_for_path_param(
|
||||
process_model_identifier
|
||||
)
|
||||
process_instance_id = script_attributes_context.process_instance_id
|
||||
if process_instance_id is None:
|
||||
raise self.get_proces_instance_id_is_missing_error(
|
||||
"save_process_instance_metadata"
|
||||
)
|
||||
raise self.get_proces_instance_id_is_missing_error("save_process_instance_metadata")
|
||||
url = current_app.config["SPIFFWORKFLOW_BACKEND_URL"]
|
||||
url += (
|
||||
f"/v1.0/process-data-file-download/{modified_process_model_identifier}/"
|
||||
|
|
|
@ -27,9 +27,7 @@ class SaveProcessInstanceMetadata(Script):
|
|||
"""Run."""
|
||||
metadata_dict = args[0]
|
||||
if script_attributes_context.process_instance_id is None:
|
||||
raise self.get_proces_instance_id_is_missing_error(
|
||||
"save_process_instance_metadata"
|
||||
)
|
||||
raise self.get_proces_instance_id_is_missing_error("save_process_instance_metadata")
|
||||
for key, value in metadata_dict.items():
|
||||
pim = ProcessInstanceMetadataModel.query.filter_by(
|
||||
process_instance_id=script_attributes_context.process_instance_id,
|
||||
|
|
|
@ -52,14 +52,11 @@ class Script:
|
|||
"""Run."""
|
||||
raise ApiError(
|
||||
"invalid_script",
|
||||
"This is an internal error. The script you are trying to execute '%s' "
|
||||
% self.__class__.__name__
|
||||
"This is an internal error. The script you are trying to execute '%s' " % self.__class__.__name__
|
||||
+ "does not properly implement the run function.",
|
||||
)
|
||||
|
||||
def get_proces_instance_id_is_missing_error(
|
||||
self, script_name: str
|
||||
) -> ProcessInstanceIdMissingError:
|
||||
def get_proces_instance_id_is_missing_error(self, script_name: str) -> ProcessInstanceIdMissingError:
|
||||
"""Return the error so we can raise it from the script and mypy will be happy."""
|
||||
raise ProcessInstanceIdMissingError(
|
||||
"The process instance id was not given to script"
|
||||
|
@ -67,9 +64,7 @@ class Script:
|
|||
" within the context of a process instance."
|
||||
)
|
||||
|
||||
def get_proces_model_identifier_is_missing_error(
|
||||
self, script_name: str
|
||||
) -> ProcessModelIdentifierMissingError:
|
||||
def get_proces_model_identifier_is_missing_error(self, script_name: str) -> ProcessModelIdentifierMissingError:
|
||||
"""Return the error so we can raise it from the script and mypy will be happy."""
|
||||
return ProcessModelIdentifierMissingError(
|
||||
"The process model identifier was not given to script"
|
||||
|
|
|
@ -56,9 +56,7 @@ class AuthenticationProviderTypes(enum.Enum):
|
|||
class AuthenticationService:
|
||||
"""AuthenticationService."""
|
||||
|
||||
ENDPOINT_CACHE: dict = (
|
||||
{}
|
||||
) # We only need to find the openid endpoints once, then we can cache them.
|
||||
ENDPOINT_CACHE: dict = {} # We only need to find the openid endpoints once, then we can cache them.
|
||||
|
||||
@staticmethod
|
||||
def client_id() -> str:
|
||||
|
@ -73,9 +71,7 @@ class AuthenticationService:
|
|||
@staticmethod
|
||||
def secret_key() -> str:
|
||||
"""Returns the secret key from the config."""
|
||||
return current_app.config.get(
|
||||
"SPIFFWORKFLOW_BACKEND_OPEN_ID_CLIENT_SECRET_KEY", ""
|
||||
)
|
||||
return current_app.config.get("SPIFFWORKFLOW_BACKEND_OPEN_ID_CLIENT_SECRET_KEY", "")
|
||||
|
||||
@classmethod
|
||||
def open_id_endpoint_for_name(cls, name: str) -> str:
|
||||
|
@@ -85,10 +81,7 @@ class AuthenticationService:
response = requests.get(openid_config_url)
AuthenticationService.ENDPOINT_CACHE = response.json()
if name not in AuthenticationService.ENDPOINT_CACHE:
raise Exception(
f"Unknown OpenID Endpoint: {name}. Tried to get from"
f" {openid_config_url}"
)
raise Exception(f"Unknown OpenID Endpoint: {name}. Tried to get from {openid_config_url}")
return AuthenticationService.ENDPOINT_CACHE.get(name, "")
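A rough sketch of the discovery-document caching idea behind ENDPOINT_CACHE; the issuer URL is hypothetical and the cache here is just a module-level dict:

import requests

ENDPOINT_CACHE: dict = {}
server_url = "https://openid.example.com/realms/spiffworkflow"  # hypothetical issuer

def open_id_endpoint_for_name(name: str) -> str:
    openid_config_url = f"{server_url}/.well-known/openid-configuration"
    # fetch the discovery document once, then serve every later lookup from the cache
    if not ENDPOINT_CACHE:
        ENDPOINT_CACHE.update(requests.get(openid_config_url, timeout=10).json())
    if name not in ENDPOINT_CACHE:
        raise Exception(f"Unknown OpenID Endpoint: {name}. Tried to get from {openid_config_url}")
    return ENDPOINT_CACHE.get(name, "")

# typical discovery keys: "authorization_endpoint", "token_endpoint", "end_session_endpoint"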
|
||||
|
||||
@staticmethod
|
||||
|
@ -114,9 +107,7 @@ class AuthenticationService:
|
|||
state = base64.b64encode(bytes(str({"redirect_url": redirect_url}), "UTF-8"))
|
||||
return state
|
||||
|
||||
def get_login_redirect_url(
|
||||
self, state: str, redirect_url: str = "/v1.0/login_return"
|
||||
) -> str:
|
||||
def get_login_redirect_url(self, state: str, redirect_url: str = "/v1.0/login_return") -> str:
|
||||
"""Get_login_redirect_url."""
|
||||
return_redirect_url = f"{self.get_backend_url()}{redirect_url}"
|
||||
login_redirect_url = (
|
||||
|
@ -129,9 +120,7 @@ class AuthenticationService:
|
|||
)
|
||||
return login_redirect_url
|
||||
|
||||
def get_auth_token_object(
|
||||
self, code: str, redirect_url: str = "/v1.0/login_return"
|
||||
) -> dict:
|
||||
def get_auth_token_object(self, code: str, redirect_url: str = "/v1.0/login_return") -> dict:
|
||||
"""Get_auth_token_object."""
|
||||
backend_basic_auth_string = f"{self.client_id()}:{self.secret_key()}"
|
||||
backend_basic_auth_bytes = bytes(backend_basic_auth_string, encoding="ascii")
|
||||
|
@ -174,9 +163,7 @@ class AuthenticationService:
|
|||
audience_array_in_token = aud
|
||||
if isinstance(aud, str):
|
||||
audience_array_in_token = [aud]
|
||||
overlapping_aud_values = [
|
||||
x for x in audience_array_in_token if x in valid_audience_values
|
||||
]
|
||||
overlapping_aud_values = [x for x in audience_array_in_token if x in valid_audience_values]
|
||||
|
||||
if iss != cls.server_url():
|
||||
valid = False
|
||||
|
@ -211,15 +198,11 @@ class AuthenticationService:
|
|||
@staticmethod
|
||||
def store_refresh_token(user_id: int, refresh_token: str) -> None:
|
||||
"""Store_refresh_token."""
|
||||
refresh_token_model = RefreshTokenModel.query.filter(
|
||||
RefreshTokenModel.user_id == user_id
|
||||
).first()
|
||||
refresh_token_model = RefreshTokenModel.query.filter(RefreshTokenModel.user_id == user_id).first()
|
||||
if refresh_token_model:
|
||||
refresh_token_model.token = refresh_token
|
||||
else:
|
||||
refresh_token_model = RefreshTokenModel(
|
||||
user_id=user_id, token=refresh_token
|
||||
)
|
||||
refresh_token_model = RefreshTokenModel(user_id=user_id, token=refresh_token)
|
||||
db.session.add(refresh_token_model)
|
||||
try:
|
||||
db.session.commit()
|
||||
|
|
|
@ -108,9 +108,7 @@ class AuthorizationService:
|
|||
)
|
||||
|
||||
received_sign = auth_header.split("sha256=")[-1].strip()
|
||||
secret = current_app.config[
|
||||
"SPIFFWORKFLOW_BACKEND_GITHUB_WEBHOOK_SECRET"
|
||||
].encode()
|
||||
secret = current_app.config["SPIFFWORKFLOW_BACKEND_GITHUB_WEBHOOK_SECRET"].encode()
|
||||
expected_sign = HMAC(key=secret, msg=request.data, digestmod=sha256).hexdigest()
|
||||
if not compare_digest(received_sign, expected_sign):
|
||||
raise TokenInvalidError(
|
||||
|
@@ -118,17 +116,13 @@ class AuthorizationService:
)

@classmethod
def has_permission(
cls, principals: list[PrincipalModel], permission: str, target_uri: str
) -> bool:
def has_permission(cls, principals: list[PrincipalModel], permission: str, target_uri: str) -> bool:
"""Has_permission."""
principal_ids = [p.id for p in principals]
target_uri_normalized = target_uri.removeprefix(V1_API_PATH_PREFIX)

permission_assignments = (
PermissionAssignmentModel.query.filter(
PermissionAssignmentModel.principal_id.in_(principal_ids)
)
PermissionAssignmentModel.query.filter(PermissionAssignmentModel.principal_id.in_(principal_ids))
.filter_by(permission=permission)
.join(PermissionTargetModel)
.filter(

@@ -136,10 +130,7 @@ class AuthorizationService:
text(f"'{target_uri_normalized}' LIKE permission_target.uri"),
# to check for exact matches as well
# see test_user_can_access_base_path_when_given_wildcard_permission unit test
text(
f"'{target_uri_normalized}' ="
" replace(replace(permission_target.uri, '/%', ''), ':%', '')"
),
text(f"'{target_uri_normalized}' = replace(replace(permission_target.uri, '/%', ''), ':%', '')"),
)
)
.all()
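The query above matches the request URI against stored permission-target patterns with SQL LIKE ('%' as the wildcard) plus an exact-match fallback; a rough Python rendering of that matching, with invented URIs:

import re

def uri_matches_target(request_uri: str, target_uri: str) -> bool:
    # SQL LIKE with '%' wildcards, re-expressed as a regex over the whole string
    like_pattern = "^" + ".*".join(re.escape(part) for part in target_uri.split("%")) + "$"
    if re.match(like_pattern, request_uri):
        return True
    # exact-match fallback, mirroring replace(replace(uri, '/%', ''), ':%', '') above
    return request_uri == target_uri.replace("/%", "").replace(":%", "")

assert uri_matches_target("/process-instances/misc:category-1:123", "/process-instances/%")
assert uri_matches_target("/process-instances", "/process-instances/%")
assert not uri_matches_target("/process-models/abc", "/process-instances/%")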
|
||||
|
@ -150,29 +141,21 @@ class AuthorizationService:
|
|||
elif permission_assignment.grant_type == "deny":
|
||||
return False
|
||||
else:
|
||||
raise Exception(
|
||||
f"Unknown grant type: {permission_assignment.grant_type}"
|
||||
)
|
||||
raise Exception(f"Unknown grant type: {permission_assignment.grant_type}")
|
||||
|
||||
return False
|
||||
|
||||
@classmethod
|
||||
def user_has_permission(
|
||||
cls, user: UserModel, permission: str, target_uri: str
|
||||
) -> bool:
|
||||
def user_has_permission(cls, user: UserModel, permission: str, target_uri: str) -> bool:
|
||||
"""User_has_permission."""
|
||||
if user.principal is None:
|
||||
raise MissingPrincipalError(
|
||||
f"Missing principal for user with id: {user.id}"
|
||||
)
|
||||
raise MissingPrincipalError(f"Missing principal for user with id: {user.id}")
|
||||
|
||||
principals = [user.principal]
|
||||
|
||||
for group in user.groups:
|
||||
if group.principal is None:
|
||||
raise MissingPrincipalError(
|
||||
f"Missing principal for group with id: {group.id}"
|
||||
)
|
||||
raise MissingPrincipalError(f"Missing principal for group with id: {group.id}")
|
||||
principals.append(group.principal)
|
||||
|
||||
return cls.has_permission(principals, permission, target_uri)
|
||||
|
@ -191,26 +174,19 @@ class AuthorizationService:
|
|||
@classmethod
|
||||
def associate_user_with_group(cls, user: UserModel, group: GroupModel) -> None:
|
||||
"""Associate_user_with_group."""
|
||||
user_group_assignemnt = UserGroupAssignmentModel.query.filter_by(
|
||||
user_id=user.id, group_id=group.id
|
||||
).first()
|
||||
user_group_assignemnt = UserGroupAssignmentModel.query.filter_by(user_id=user.id, group_id=group.id).first()
|
||||
if user_group_assignemnt is None:
|
||||
user_group_assignemnt = UserGroupAssignmentModel(
|
||||
user_id=user.id, group_id=group.id
|
||||
)
|
||||
user_group_assignemnt = UserGroupAssignmentModel(user_id=user.id, group_id=group.id)
|
||||
db.session.add(user_group_assignemnt)
|
||||
db.session.commit()
|
||||
|
||||
@classmethod
|
||||
def import_permissions_from_yaml_file(
|
||||
cls, raise_if_missing_user: bool = False
|
||||
) -> DesiredPermissionDict:
|
||||
def import_permissions_from_yaml_file(cls, raise_if_missing_user: bool = False) -> DesiredPermissionDict:
|
||||
"""Import_permissions_from_yaml_file."""
|
||||
if current_app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"] is None:
|
||||
raise (
|
||||
PermissionsFileNotSetError(
|
||||
"SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME needs to be set in"
|
||||
" order to import permissions"
|
||||
"SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME needs to be set in order to import permissions"
|
||||
)
|
||||
)
|
||||
|
||||
|
@ -234,11 +210,7 @@ class AuthorizationService:
|
|||
user = UserModel.query.filter_by(username=username).first()
|
||||
if user is None:
|
||||
if raise_if_missing_user:
|
||||
raise (
|
||||
UserNotFoundError(
|
||||
f"Could not find a user with name: {username}"
|
||||
)
|
||||
)
|
||||
raise (UserNotFoundError(f"Could not find a user with name: {username}"))
|
||||
continue
|
||||
user_to_group_dict: UserToGroupDict = {
|
||||
"username": user.username,
|
||||
|
@ -249,9 +221,7 @@ class AuthorizationService:
|
|||
|
||||
permission_assignments = []
|
||||
if "permissions" in permission_configs:
|
||||
for _permission_identifier, permission_config in permission_configs[
|
||||
"permissions"
|
||||
].items():
|
||||
for _permission_identifier, permission_config in permission_configs["permissions"].items():
|
||||
uri = permission_config["uri"]
|
||||
permission_target = cls.find_or_create_permission_target(uri)
|
||||
|
||||
|
@ -272,9 +242,7 @@ class AuthorizationService:
|
|||
user = UserModel.query.filter_by(username=username).first()
|
||||
if user is not None:
|
||||
principal = (
|
||||
PrincipalModel.query.join(UserModel)
|
||||
.filter(UserModel.username == username)
|
||||
.first()
|
||||
PrincipalModel.query.join(UserModel).filter(UserModel.username == username).first()
|
||||
)
|
||||
permission_assignments.append(
|
||||
cls.create_permission_for_principal(
|
||||
|
@ -297,9 +265,9 @@ class AuthorizationService:
|
|||
"""Find_or_create_permission_target."""
|
||||
uri_with_percent = re.sub(r"\*", "%", uri)
|
||||
target_uri_normalized = uri_with_percent.removeprefix(V1_API_PATH_PREFIX)
|
||||
permission_target: Optional[PermissionTargetModel] = (
|
||||
PermissionTargetModel.query.filter_by(uri=target_uri_normalized).first()
|
||||
)
|
||||
permission_target: Optional[PermissionTargetModel] = PermissionTargetModel.query.filter_by(
|
||||
uri=target_uri_normalized
|
||||
).first()
|
||||
if permission_target is None:
|
||||
permission_target = PermissionTargetModel(uri=target_uri_normalized)
|
||||
db.session.add(permission_target)
|
||||
|
@ -314,13 +282,11 @@ class AuthorizationService:
|
|||
permission: str,
|
||||
) -> PermissionAssignmentModel:
|
||||
"""Create_permission_for_principal."""
|
||||
permission_assignment: Optional[PermissionAssignmentModel] = (
|
||||
PermissionAssignmentModel.query.filter_by(
|
||||
principal_id=principal.id,
|
||||
permission_target_id=permission_target.id,
|
||||
permission=permission,
|
||||
).first()
|
||||
)
|
||||
permission_assignment: Optional[PermissionAssignmentModel] = PermissionAssignmentModel.query.filter_by(
|
||||
principal_id=principal.id,
|
||||
permission_target_id=permission_target.id,
|
||||
permission=permission,
|
||||
).first()
|
||||
if permission_assignment is None:
|
||||
permission_assignment = PermissionAssignmentModel(
|
||||
principal_id=principal.id,
|
||||
|
@ -400,10 +366,7 @@ class AuthorizationService:
|
|||
)
|
||||
|
||||
api_view_function = current_app.view_functions[request.endpoint]
|
||||
if (
|
||||
api_view_function
|
||||
and api_view_function.__name__ in authorization_exclusion_list
|
||||
):
|
||||
if api_view_function and api_view_function.__name__ in authorization_exclusion_list:
|
||||
return None
|
||||
|
||||
permission_string = cls.get_permission_from_http_method(request.method)
|
||||
|
@ -443,10 +406,7 @@ class AuthorizationService:
|
|||
) from exception
|
||||
except jwt.InvalidTokenError as exception:
|
||||
raise TokenInvalidError(
|
||||
(
|
||||
"The Authentication token you provided is invalid. You need a new"
|
||||
" token. "
|
||||
),
|
||||
"The Authentication token you provided is invalid. You need a new token. ",
|
||||
) from exception
|
||||
|
||||
@staticmethod
|
||||
|
@ -506,9 +466,7 @@ class AuthorizationService:
|
|||
):
|
||||
if tenant_specific_field in user_info:
|
||||
field_number = field_index + 1
|
||||
user_attributes[f"tenant_specific_field_{field_number}"] = user_info[
|
||||
tenant_specific_field
|
||||
]
|
||||
user_attributes[f"tenant_specific_field_{field_number}"] = user_info[tenant_specific_field]
|
||||
|
||||
# example value for service: http://localhost:7002/realms/spiffworkflow (keycloak url)
|
||||
user_model = (
|
||||
|
@ -567,9 +525,7 @@ class AuthorizationService:
|
|||
# 2. view the logs for these instances.
|
||||
if permission_set == "start":
|
||||
target_uri = f"/process-instances/{process_related_path_segment}"
|
||||
permissions_to_assign.append(
|
||||
PermissionToAssign(permission="create", target_uri=target_uri)
|
||||
)
|
||||
permissions_to_assign.append(PermissionToAssign(permission="create", target_uri=target_uri))
|
||||
|
||||
# giving people access to all logs for an instance actually gives them a little bit more access
|
||||
# than would be optimal. ideally, you would only be able to view the logs for instances that you started
|
||||
|
@ -586,28 +542,18 @@ class AuthorizationService:
|
|||
f"/logs/{process_related_path_segment}",
|
||||
f"/process-data-file-download/{process_related_path_segment}",
|
||||
]:
|
||||
permissions_to_assign.append(
|
||||
PermissionToAssign(permission="read", target_uri=target_uri)
|
||||
)
|
||||
permissions_to_assign.append(PermissionToAssign(permission="read", target_uri=target_uri))
|
||||
else:
|
||||
if permission_set == "all":
|
||||
for path_segment_dict in PATH_SEGMENTS_FOR_PERMISSION_ALL:
|
||||
target_uri = (
|
||||
f"{path_segment_dict['path']}/{process_related_path_segment}"
|
||||
)
|
||||
target_uri = f"{path_segment_dict['path']}/{process_related_path_segment}"
|
||||
relevant_permissions = path_segment_dict["relevant_permissions"]
|
||||
for permission in relevant_permissions:
|
||||
permissions_to_assign.append(
|
||||
PermissionToAssign(
|
||||
permission=permission, target_uri=target_uri
|
||||
)
|
||||
)
|
||||
permissions_to_assign.append(PermissionToAssign(permission=permission, target_uri=target_uri))
|
||||
|
||||
for target_uri in target_uris:
|
||||
for permission in permissions:
|
||||
permissions_to_assign.append(
|
||||
PermissionToAssign(permission=permission, target_uri=target_uri)
|
||||
)
|
||||
permissions_to_assign.append(PermissionToAssign(permission=permission, target_uri=target_uri))
|
||||
|
||||
return permissions_to_assign
|
||||
|
||||
|
@ -615,48 +561,26 @@ class AuthorizationService:
|
|||
def set_basic_permissions(cls) -> list[PermissionToAssign]:
|
||||
"""Set_basic_permissions."""
|
||||
permissions_to_assign: list[PermissionToAssign] = []
|
||||
permissions_to_assign.append(PermissionToAssign(permission="read", target_uri="/process-instances/for-me"))
|
||||
permissions_to_assign.append(PermissionToAssign(permission="read", target_uri="/processes"))
|
||||
permissions_to_assign.append(PermissionToAssign(permission="read", target_uri="/service-tasks"))
|
||||
permissions_to_assign.append(PermissionToAssign(permission="read", target_uri="/user-groups/for-current-user"))
|
||||
permissions_to_assign.append(
|
||||
PermissionToAssign(
|
||||
permission="read", target_uri="/process-instances/for-me"
|
||||
)
|
||||
)
|
||||
permissions_to_assign.append(
|
||||
PermissionToAssign(permission="read", target_uri="/processes")
|
||||
)
|
||||
permissions_to_assign.append(
|
||||
PermissionToAssign(permission="read", target_uri="/service-tasks")
|
||||
)
|
||||
permissions_to_assign.append(
|
||||
PermissionToAssign(
|
||||
permission="read", target_uri="/user-groups/for-current-user"
|
||||
)
|
||||
)
|
||||
permissions_to_assign.append(
|
||||
PermissionToAssign(
|
||||
permission="read", target_uri="/process-instances/find-by-id/*"
|
||||
)
|
||||
PermissionToAssign(permission="read", target_uri="/process-instances/find-by-id/*")
|
||||
)
|
||||
|
||||
for permission in ["create", "read", "update", "delete"]:
|
||||
permissions_to_assign.append(
|
||||
PermissionToAssign(
|
||||
permission=permission, target_uri="/process-instances/reports/*"
|
||||
)
|
||||
)
|
||||
permissions_to_assign.append(
|
||||
PermissionToAssign(permission=permission, target_uri="/tasks/*")
|
||||
PermissionToAssign(permission=permission, target_uri="/process-instances/reports/*")
|
||||
)
|
||||
permissions_to_assign.append(PermissionToAssign(permission=permission, target_uri="/tasks/*"))
|
||||
return permissions_to_assign
|
||||
|
||||
@classmethod
|
||||
def set_process_group_permissions(
|
||||
cls, target: str, permission_set: str
|
||||
) -> list[PermissionToAssign]:
|
||||
def set_process_group_permissions(cls, target: str, permission_set: str) -> list[PermissionToAssign]:
|
||||
"""Set_process_group_permissions."""
|
||||
permissions_to_assign: list[PermissionToAssign] = []
|
||||
process_group_identifier = (
|
||||
target.removeprefix("PG:").replace("/", ":").removeprefix(":")
|
||||
)
|
||||
process_group_identifier = target.removeprefix("PG:").replace("/", ":").removeprefix(":")
|
||||
process_related_path_segment = f"{process_group_identifier}:*"
|
||||
if process_group_identifier == "ALL":
|
||||
process_related_path_segment = "*"
|
||||
|
@ -670,14 +594,10 @@ class AuthorizationService:
|
|||
return permissions_to_assign
|
||||
|
||||
@classmethod
|
||||
def set_process_model_permissions(
|
||||
cls, target: str, permission_set: str
|
||||
) -> list[PermissionToAssign]:
|
||||
def set_process_model_permissions(cls, target: str, permission_set: str) -> list[PermissionToAssign]:
|
||||
"""Set_process_model_permissions."""
|
||||
permissions_to_assign: list[PermissionToAssign] = []
|
||||
process_model_identifier = (
|
||||
target.removeprefix("PM:").replace("/", ":").removeprefix(":")
|
||||
)
|
||||
process_model_identifier = target.removeprefix("PM:").replace("/", ":").removeprefix(":")
|
||||
process_related_path_segment = f"{process_model_identifier}/*"
|
||||
|
||||
if process_model_identifier == "ALL":
|
||||
|
@ -690,9 +610,7 @@ class AuthorizationService:
|
|||
return permissions_to_assign
|
||||
|
||||
@classmethod
|
||||
def explode_permissions(
|
||||
cls, permission_set: str, target: str
|
||||
) -> list[PermissionToAssign]:
|
||||
def explode_permissions(cls, permission_set: str, target: str) -> list[PermissionToAssign]:
|
||||
"""Explodes given permissions to and returns list of PermissionToAssign objects.
|
||||
|
||||
These can be used to then iterate through and inserted into the database.
|
||||
|
@@ -719,30 +637,20 @@ class AuthorizationService:
permissions = ["create", "read", "update", "delete"]

if target.startswith("PG:"):
permissions_to_assign += cls.set_process_group_permissions(
target, permission_set
)
permissions_to_assign += cls.set_process_group_permissions(target, permission_set)
elif target.startswith("PM:"):
permissions_to_assign += cls.set_process_model_permissions(
target, permission_set
)
permissions_to_assign += cls.set_process_model_permissions(target, permission_set)
elif permission_set == "start":
raise InvalidPermissionError(
"Permission 'start' is only available for macros PM and PG."
)
raise InvalidPermissionError("Permission 'start' is only available for macros PM and PG.")

elif target.startswith("BASIC"):
permissions_to_assign += cls.set_basic_permissions()
elif target == "ALL":
for permission in permissions:
permissions_to_assign.append(
PermissionToAssign(permission=permission, target_uri="/*")
)
permissions_to_assign.append(PermissionToAssign(permission=permission, target_uri="/*"))
elif target.startswith("/"):
for permission in permissions:
permissions_to_assign.append(
PermissionToAssign(permission=permission, target_uri=target)
)
permissions_to_assign.append(PermissionToAssign(permission=permission, target_uri=target))
else:
raise InvalidPermissionError(
f"Target uri '{target}' with permission set '{permission_set}' is"
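A simplified, self-contained mimic of how a PG: target with the "start" permission set explodes into PermissionToAssign entries; the real method also covers PM:, BASIC, ALL and more URIs, and the group identifier here is made up:

from dataclasses import dataclass

@dataclass
class PermissionToAssign:
    permission: str
    target_uri: str

def explode_start_permissions_for_group(target: str) -> list[PermissionToAssign]:
    # Mirrors the PG: prefix handling shown in set_process_group_permissions above.
    process_group_identifier = target.removeprefix("PG:").replace("/", ":").removeprefix(":")
    segment = f"{process_group_identifier}:*"
    perms = [PermissionToAssign("create", f"/process-instances/{segment}")]
    for read_uri in (f"/logs/{segment}", f"/process-data-file-download/{segment}"):
        perms.append(PermissionToAssign("read", read_uri))
    return perms

for p in explode_start_permissions_for_group("PG:hr"):
    print(p.permission, p.target_uri)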
@@ -761,9 +669,7 @@ class AuthorizationService:
|
|||
permissions_to_assign = cls.explode_permissions(permission, target)
|
||||
permission_assignments = []
|
||||
for permission_to_assign in permissions_to_assign:
|
||||
permission_target = cls.find_or_create_permission_target(
|
||||
permission_to_assign.target_uri
|
||||
)
|
||||
permission_target = cls.find_or_create_permission_target(permission_to_assign.target_uri)
|
||||
permission_assignments.append(
|
||||
cls.create_permission_for_principal(
|
||||
group.principal, permission_target, permission_to_assign.permission
|
||||
|
@ -789,9 +695,7 @@ class AuthorizationService:
|
|||
"group_identifier": group_identifier,
|
||||
}
|
||||
desired_user_to_group_identifiers.append(user_to_group_dict)
|
||||
GroupService.add_user_to_group_or_add_to_waiting(
|
||||
username, group_identifier
|
||||
)
|
||||
GroupService.add_user_to_group_or_add_to_waiting(username, group_identifier)
|
||||
desired_group_identifiers.add(group_identifier)
|
||||
for permission in group["permissions"]:
|
||||
for crud_op in permission["actions"]:
|
||||
|
@ -812,8 +716,7 @@ class AuthorizationService:
|
|||
# do not remove users from the default user group
|
||||
if (
|
||||
current_app.config["SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP"] is None
|
||||
or current_app.config["SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP"]
|
||||
!= iutga.group.identifier
|
||||
or current_app.config["SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP"] != iutga.group.identifier
|
||||
):
|
||||
current_user_dict: UserToGroupDict = {
|
||||
"username": iutga.user.username,
|
||||
|
@ -823,12 +726,8 @@ class AuthorizationService:
|
|||
db.session.delete(iutga)
|
||||
|
||||
# do not remove the default user group
|
||||
desired_group_identifiers.add(
|
||||
current_app.config["SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP"]
|
||||
)
|
||||
groups_to_delete = GroupModel.query.filter(
|
||||
GroupModel.identifier.not_in(desired_group_identifiers)
|
||||
).all()
|
||||
desired_group_identifiers.add(current_app.config["SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP"])
|
||||
groups_to_delete = GroupModel.query.filter(GroupModel.identifier.not_in(desired_group_identifiers)).all()
|
||||
for gtd in groups_to_delete:
|
||||
db.session.delete(gtd)
|
||||
db.session.commit()
|
||||
|
|
|
@ -28,9 +28,7 @@ class BackgroundProcessingService:
|
|||
"""Since this runs in a scheduler, we need to specify the app context as well."""
|
||||
with self.app.app_context():
|
||||
ProcessInstanceLockService.set_thread_local_locking_context("bg:userinput")
|
||||
ProcessInstanceService.do_waiting(
|
||||
ProcessInstanceStatus.user_input_required.value
|
||||
)
|
||||
ProcessInstanceService.do_waiting(ProcessInstanceStatus.user_input_required.value)
|
||||
|
||||
def process_message_instances_with_app_context(self) -> None:
|
||||
"""Since this runs in a scheduler, we need to specify the app context as well."""
|
||||
|
|
|
@ -25,22 +25,14 @@ class ErrorHandlingService:
|
|||
@staticmethod
|
||||
def set_instance_status(instance_id: int, status: str) -> None:
|
||||
"""Set_instance_status."""
|
||||
instance = (
|
||||
db.session.query(ProcessInstanceModel)
|
||||
.filter(ProcessInstanceModel.id == instance_id)
|
||||
.first()
|
||||
)
|
||||
instance = db.session.query(ProcessInstanceModel).filter(ProcessInstanceModel.id == instance_id).first()
|
||||
if instance:
|
||||
instance.status = status
|
||||
db.session.commit()
|
||||
|
||||
def handle_error(
|
||||
self, _processor: ProcessInstanceProcessor, _error: Union[ApiError, Exception]
|
||||
) -> None:
|
||||
def handle_error(self, _processor: ProcessInstanceProcessor, _error: Union[ApiError, Exception]) -> None:
|
||||
"""On unhandled exceptions, set instance.status based on model.fault_or_suspend_on_exception."""
|
||||
process_model = ProcessModelService.get_process_model(
|
||||
_processor.process_model_identifier
|
||||
)
|
||||
process_model = ProcessModelService.get_process_model(_processor.process_model_identifier)
|
||||
# First, suspend or fault the instance
|
||||
if process_model.fault_or_suspend_on_exception == "suspend":
|
||||
self.set_instance_status(
|
||||
|
@ -72,8 +64,7 @@ class ErrorHandlingService:
|
|||
) -> None:
|
||||
"""Send a BPMN Message - which may kick off a waiting process."""
|
||||
message_text = (
|
||||
f"There was an exception running process {process_model.id}.\nOriginal"
|
||||
f" Error:\n{error.__repr__()}"
|
||||
f"There was an exception running process {process_model.id}.\nOriginal Error:\n{error.__repr__()}"
|
||||
)
|
||||
message_payload = {
|
||||
"message_text": message_text,
|
||||
|
|
|
@ -84,17 +84,13 @@ class FileSystemService:
|
|||
@staticmethod
|
||||
def workflow_path(spec: ProcessModelInfo) -> str:
|
||||
"""Workflow_path."""
|
||||
process_model_path = os.path.join(
|
||||
FileSystemService.root_path(), spec.id_for_file_path()
|
||||
)
|
||||
process_model_path = os.path.join(FileSystemService.root_path(), spec.id_for_file_path())
|
||||
return process_model_path
|
||||
|
||||
@staticmethod
|
||||
def full_path_to_process_model_file(spec: ProcessModelInfo) -> str:
|
||||
"""Full_path_to_process_model_file."""
|
||||
return os.path.join(
|
||||
FileSystemService.workflow_path(spec), spec.primary_file_name # type: ignore
|
||||
)
|
||||
return os.path.join(FileSystemService.workflow_path(spec), spec.primary_file_name) # type: ignore
|
||||
|
||||
def next_display_order(self, spec: ProcessModelInfo) -> int:
|
||||
"""Next_display_order."""
|
||||
|
@ -124,8 +120,7 @@ class FileSystemService:
|
|||
if file_extension not in FileType.list():
|
||||
raise ApiError(
|
||||
"unknown_extension",
|
||||
"The file you provided does not have an accepted extension:"
|
||||
+ file_extension,
|
||||
"The file you provided does not have an accepted extension:" + file_extension,
|
||||
status_code=404,
|
||||
)
|
||||
|
||||
|
@ -173,9 +168,7 @@ class FileSystemService:
|
|||
content_type = CONTENT_TYPES[file_type.name]
|
||||
last_modified = FileSystemService._last_modified(file_path)
|
||||
size = os.path.getsize(file_path)
|
||||
file = File.from_file_system(
|
||||
file_name, file_type, content_type, last_modified, size
|
||||
)
|
||||
file = File.from_file_system(file_name, file_type, content_type, last_modified, size)
|
||||
return file
|
||||
|
||||
@staticmethod
|
||||
|
@ -193,6 +186,4 @@ class FileSystemService:
|
|||
stats = item.stat()
|
||||
file_size = stats.st_size
|
||||
last_modified = FileSystemService._last_modified(item.path)
|
||||
return File.from_file_system(
|
||||
item.name, file_type, content_type, last_modified, file_size
|
||||
)
|
||||
return File.from_file_system(item.name, file_type, content_type, last_modified, file_size)
@@ -37,14 +37,10 @@ class GitService:
|
|||
@classmethod
|
||||
def get_current_revision(cls) -> str:
|
||||
"""Get_current_revision."""
|
||||
bpmn_spec_absolute_dir = current_app.config[
|
||||
"SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"
|
||||
]
|
||||
bpmn_spec_absolute_dir = current_app.config["SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"]
|
||||
# The value includes a carriage return character at the end, so we don't grab the last character
|
||||
with FileSystemService.cd(bpmn_spec_absolute_dir):
|
||||
return cls.run_shell_command_to_get_stdout(
|
||||
["git", "rev-parse", "--short", "HEAD"]
|
||||
)
|
||||
return cls.run_shell_command_to_get_stdout(["git", "rev-parse", "--short", "HEAD"])
|
||||
|
||||
@classmethod
|
||||
def get_instance_file_contents_for_revision(
|
||||
|
@ -54,12 +50,8 @@ class GitService:
|
|||
file_name: Optional[str] = None,
|
||||
) -> str:
|
||||
"""Get_instance_file_contents_for_revision."""
|
||||
bpmn_spec_absolute_dir = current_app.config[
|
||||
"SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"
|
||||
]
|
||||
process_model_relative_path = FileSystemService.process_model_relative_path(
|
||||
process_model
|
||||
)
|
||||
bpmn_spec_absolute_dir = current_app.config["SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"]
|
||||
process_model_relative_path = FileSystemService.process_model_relative_path(process_model)
|
||||
file_name_to_use = file_name
|
||||
if file_name_to_use is None:
|
||||
file_name_to_use = process_model.primary_file_name
|
||||
|
@ -82,22 +74,14 @@ class GitService:
|
|||
cls.check_for_basic_configs()
|
||||
branch_name_to_use = branch_name
|
||||
if branch_name_to_use is None:
|
||||
branch_name_to_use = current_app.config[
|
||||
"SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH"
|
||||
]
|
||||
branch_name_to_use = current_app.config["SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH"]
|
||||
repo_path_to_use = repo_path
|
||||
if repo_path is None:
|
||||
repo_path_to_use = current_app.config[
|
||||
"SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"
|
||||
]
|
||||
repo_path_to_use = current_app.config["SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"]
|
||||
if repo_path_to_use is None:
|
||||
raise ConfigurationError(
|
||||
"SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR config must be set"
|
||||
)
|
||||
raise ConfigurationError("SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR config must be set")
|
||||
|
||||
shell_command_path = os.path.join(
|
||||
current_app.root_path, "..", "..", "bin", "git_commit_bpmn_models_repo"
|
||||
)
|
||||
shell_command_path = os.path.join(current_app.root_path, "..", "..", "bin", "git_commit_bpmn_models_repo")
|
||||
shell_command = [
|
||||
shell_command_path,
|
||||
repo_path_to_use,
|
||||
|
@ -119,10 +103,7 @@ class GitService:
|
|||
def check_for_publish_configs(cls) -> None:
|
||||
"""Check_for_configs."""
|
||||
cls.check_for_basic_configs()
|
||||
if (
|
||||
current_app.config["SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH"]
|
||||
is None
|
||||
):
|
||||
if current_app.config["SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH"] is None:
|
||||
raise MissingGitConfigsError(
|
||||
"Missing config for SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH. "
|
||||
"This is required for publishing process models"
|
||||
|
@ -155,29 +136,21 @@ class GitService:
|
|||
) -> Union[subprocess.CompletedProcess[bytes], bool]:
|
||||
"""Run_shell_command."""
|
||||
my_env = os.environ.copy()
|
||||
my_env["GIT_COMMITTER_NAME"] = (
|
||||
current_app.config.get("SPIFFWORKFLOW_BACKEND_GIT_USERNAME") or "unknown"
|
||||
)
|
||||
my_env["GIT_COMMITTER_NAME"] = current_app.config.get("SPIFFWORKFLOW_BACKEND_GIT_USERNAME") or "unknown"
|
||||
|
||||
my_env["GIT_COMMITTER_EMAIL"] = (
|
||||
current_app.config.get("SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL")
|
||||
or "unknown@example.org"
|
||||
current_app.config.get("SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL") or "unknown@example.org"
|
||||
)
|
||||
|
||||
# SSH authentication can be also provided via gitconfig.
|
||||
ssh_key_path = current_app.config.get(
|
||||
"SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY_PATH"
|
||||
)
|
||||
ssh_key_path = current_app.config.get("SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY_PATH")
|
||||
if ssh_key_path is not None:
|
||||
my_env["GIT_SSH_COMMAND"] = (
|
||||
"ssh -F /dev/null -o UserKnownHostsFile=/dev/null -o"
|
||||
" StrictHostKeyChecking=no -i %s" % ssh_key_path
|
||||
"ssh -F /dev/null -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i %s" % ssh_key_path
|
||||
)
|
||||
|
||||
# this is fine since we pass the commands directly
|
||||
result = subprocess.run( # noqa
|
||||
command, check=False, capture_output=True, env=my_env
|
||||
)
|
||||
result = subprocess.run(command, check=False, capture_output=True, env=my_env) # noqa
|
||||
|
||||
if return_success_state:
|
||||
return result.returncode == 0
|
||||
|
@ -185,11 +158,7 @@ class GitService:
|
|||
if result.returncode != 0:
|
||||
stdout = result.stdout.decode("utf-8")
|
||||
stderr = result.stderr.decode("utf-8")
|
||||
raise GitCommandError(
|
||||
f"Failed to execute git command: {command}"
|
||||
f"Stdout: {stdout}"
|
||||
f"Stderr: {stderr}"
|
||||
)
|
||||
raise GitCommandError(f"Failed to execute git command: {command}Stdout: {stdout}Stderr: {stderr}")
|
||||
|
||||
return result
|
||||
|
||||
|
@ -201,19 +170,16 @@ class GitService:
|
|||
|
||||
if "repository" not in webhook or "clone_url" not in webhook["repository"]:
|
||||
raise InvalidGitWebhookBodyError(
|
||||
"Cannot find required keys of 'repository:clone_url' from webhook"
|
||||
f" body: {webhook}"
|
||||
f"Cannot find required keys of 'repository:clone_url' from webhook body: {webhook}"
|
||||
)
|
||||
|
||||
config_clone_url = current_app.config[
|
||||
"SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL"
|
||||
]
|
||||
config_clone_url = current_app.config["SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL"]
|
||||
repo = webhook["repository"]
|
||||
valid_clone_urls = [repo["clone_url"], repo["git_url"], repo["ssh_url"]]
|
||||
if config_clone_url not in valid_clone_urls:
|
||||
raise GitCloneUrlMismatchError(
|
||||
"Configured clone url does not match the repo URLs from webhook: %s"
|
||||
" =/= %s" % (config_clone_url, valid_clone_urls)
|
||||
"Configured clone url does not match the repo URLs from webhook: %s =/= %s"
|
||||
% (config_clone_url, valid_clone_urls)
|
||||
)
|
||||
|
||||
# Test webhook requests have a zen koan and hook info.
|
||||
|
@ -221,9 +187,7 @@ class GitService:
|
|||
return False
|
||||
|
||||
if "ref" not in webhook:
|
||||
raise InvalidGitWebhookBodyError(
|
||||
f"Could not find the 'ref' arg in the webhook boy: {webhook}"
|
||||
)
|
||||
raise InvalidGitWebhookBodyError(f"Could not find the 'ref' arg in the webhook boy: {webhook}")
|
||||
|
||||
if current_app.config["SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH"] is None:
|
||||
raise MissingGitConfigsError(
|
||||
|
@ -236,9 +200,7 @@ class GitService:
|
|||
if ref != f"refs/heads/{git_branch}":
|
||||
return False
|
||||
|
||||
with FileSystemService.cd(
|
||||
current_app.config["SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"]
|
||||
):
|
||||
with FileSystemService.cd(current_app.config["SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"]):
|
||||
cls.run_shell_command(["git", "pull", "--rebase"])
|
||||
return True
|
||||
|
||||
|
@ -247,9 +209,7 @@ class GitService:
|
|||
"""Publish."""
|
||||
cls.check_for_publish_configs()
|
||||
source_process_model_root = FileSystemService.root_path()
|
||||
source_process_model_path = os.path.join(
|
||||
source_process_model_root, process_model_id
|
||||
)
|
||||
source_process_model_path = os.path.join(source_process_model_root, process_model_id)
|
||||
unique_hex = uuid.uuid4().hex
|
||||
clone_dir = f"sample-process-models.{unique_hex}"
|
||||
|
||||
|
@ -257,9 +217,7 @@ class GitService:
|
|||
# we are adding a guid to this so the flake8 issue has been mitigated
|
||||
destination_process_root = f"/tmp/{clone_dir}" # noqa
|
||||
|
||||
git_clone_url = current_app.config[
|
||||
"SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL"
|
||||
]
|
||||
git_clone_url = current_app.config["SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL"]
|
||||
cmd = ["git", "clone", git_clone_url, destination_process_root]
|
||||
|
||||
cls.run_shell_command(cmd)
|
||||
|
@ -281,9 +239,7 @@ class GitService:
|
|||
cls.run_shell_command(["git", "checkout", "-b", branch_to_pull_request])
|
||||
|
||||
# copy files from process model into the new publish branch
|
||||
destination_process_model_path = os.path.join(
|
||||
destination_process_root, process_model_id
|
||||
)
|
||||
destination_process_model_path = os.path.join(destination_process_root, process_model_id)
|
||||
if os.path.exists(destination_process_model_path):
|
||||
shutil.rmtree(destination_process_model_path)
|
||||
shutil.copytree(source_process_model_path, destination_process_model_path)
|
||||
|
@ -296,9 +252,7 @@ class GitService:
|
|||
cls.commit(commit_message, destination_process_root, branch_to_pull_request)
|
||||
|
||||
# build url for github page to open PR
|
||||
git_remote = cls.run_shell_command_to_get_stdout(
|
||||
["git", "config", "--get", "remote.origin.url"]
|
||||
)
|
||||
git_remote = cls.run_shell_command_to_get_stdout(["git", "config", "--get", "remote.origin.url"])
|
||||
remote_url = git_remote.strip().replace(".git", "")
|
||||
pr_url = f"{remote_url}/compare/{branch_to_update}...{branch_to_pull_request}?expand=1"
|
||||
|
||||
|
|
|
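For readers who want the shell-command pattern from the GitService hunks in one self-contained piece, here is a rough sketch of the same approach; it is illustrative only, assumes nothing beyond the standard library, and the function name and defaults are made up for this example:

import os
import subprocess
from typing import Optional


def run_git_command(args: list, ssh_key_path: Optional[str] = None) -> subprocess.CompletedProcess:
    # Copy the current environment and override only the git-specific values.
    env = os.environ.copy()
    env.setdefault("GIT_COMMITTER_NAME", "unknown")
    env.setdefault("GIT_COMMITTER_EMAIL", "unknown@example.org")
    if ssh_key_path is not None:
        # Point git at a specific private key and skip host-key prompts.
        env["GIT_SSH_COMMAND"] = "ssh -F /dev/null -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i %s" % ssh_key_path
    # The command list is passed directly, so no shell interpolation happens.
    return subprocess.run(["git", *args], check=False, capture_output=True, env=env)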
@@ -13,9 +13,7 @@ class GroupService:
@classmethod
def find_or_create_group(cls, group_identifier: str) -> GroupModel:
"""Find_or_create_group."""
group: Optional[GroupModel] = GroupModel.query.filter_by(
identifier=group_identifier
).first()
group: Optional[GroupModel] = GroupModel.query.filter_by(identifier=group_identifier).first()
if group is None:
group = GroupModel(identifier=group_identifier)
db.session.add(group)

@@ -24,9 +22,7 @@ class GroupService:
return group

@classmethod
def add_user_to_group_or_add_to_waiting(
cls, username: str, group_identifier: str
) -> None:
def add_user_to_group_or_add_to_waiting(cls, username: str, group_identifier: str) -> None:
"""Add_user_to_group_or_add_to_waiting."""
group = cls.find_or_create_group(group_identifier)
user = UserModel.query.filter_by(username=username).first()


@@ -63,10 +63,7 @@ class JsonFormatter(logging.Formatter):

KeyError is raised if an unknown attribute is provided in the fmt_dict.
"""
return {
fmt_key: record.__dict__[fmt_val]
for fmt_key, fmt_val in self.fmt_dict.items()
}
return {fmt_key: record.__dict__[fmt_val] for fmt_key, fmt_val in self.fmt_dict.items()}

def format(self, record: logging.LogRecord) -> str:
"""Mostly the same as the parent's class method.

@@ -124,15 +121,12 @@ def setup_logger(app: Flask) -> None:

if upper_log_level_string not in log_levels:
raise InvalidLogLevelError(
f"Log level given is invalid: '{upper_log_level_string}'. Valid options are"
f" {log_levels}"
f"Log level given is invalid: '{upper_log_level_string}'. Valid options are {log_levels}"
)

log_level = getattr(logging, upper_log_level_string)
spiff_log_level = getattr(logging, upper_log_level_string)
log_formatter = logging.Formatter(
"%(asctime)s - %(name)s - %(levelname)s - %(message)s"
)
log_formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")

app.logger.debug("Printing log to create app logger")

@@ -235,9 +229,7 @@ class DBHandler(logging.Handler):
message = record.msg if hasattr(record, "msg") else None

current_user_id = None
if bpmn_task_type in Task.HUMAN_TASK_TYPES and hasattr(
record, "current_user_id"
):
if bpmn_task_type in Task.HUMAN_TASK_TYPES and hasattr(record, "current_user_id"):
current_user_id = record.current_user_id # type: ignore

spiff_step = (

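The dict comprehension that the logging hunks above now keep on one line can be read in isolation as roughly the following sketch (the class name and fmt_dict contents are illustrative, not taken from the file):

import logging


class MiniJsonFormatter(logging.Formatter):
    def __init__(self, fmt_dict):
        # fmt_dict maps output keys to LogRecord attribute names, e.g. {"level": "levelname", "message": "msg"}.
        super().__init__()
        self.fmt_dict = fmt_dict

    def record_to_dict(self, record: logging.LogRecord) -> dict:
        # An unknown attribute name raises KeyError, as noted in the docstring above.
        return {fmt_key: record.__dict__[fmt_val] for fmt_key, fmt_val in self.fmt_dict.items()}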
@@ -28,9 +28,7 @@ class MessageService:
"""MessageService."""

@classmethod
def correlate_send_message(
cls, message_instance_send: MessageInstanceModel
) -> Optional[MessageInstanceModel]:
def correlate_send_message(cls, message_instance_send: MessageInstanceModel) -> Optional[MessageInstanceModel]:
"""Connects the given send message to a 'receive' message if possible.

:param message_instance_send:

@@ -52,18 +50,14 @@ class MessageService:
message_instance_receive: Optional[MessageInstanceModel] = None
try:
for message_instance in available_receive_messages:
if message_instance.correlates(
message_instance_send, CustomBpmnScriptEngine()
):
if message_instance.correlates(message_instance_send, CustomBpmnScriptEngine()):
message_instance_receive = message_instance

if message_instance_receive is None:
# Check for a message triggerable process and start that to create a new message_instance_receive
message_triggerable_process_model = (
MessageTriggerableProcessModel.query.filter_by(
message_name=message_instance_send.name
).first()
)
message_triggerable_process_model = MessageTriggerableProcessModel.query.filter_by(
message_name=message_instance_send.name
).first()
if message_triggerable_process_model:
receiving_process = MessageService.start_process_with_message(
message_triggerable_process_model, message_instance_send

@@ -74,17 +68,10 @@ class MessageService:
status="ready",
).first()
else:
receiving_process = (
MessageService.get_process_instance_for_message_instance(
message_instance_receive
)
)
receiving_process = MessageService.get_process_instance_for_message_instance(message_instance_receive)

# Assure we can send the message, otherwise keep going.
if (
message_instance_receive is None
or not receiving_process.can_receive_message()
):
if message_instance_receive is None or not receiving_process.can_receive_message():
message_instance_send.status = "ready"
message_instance_send.status = "ready"
db.session.add(message_instance_send)

@@ -124,9 +111,7 @@ class MessageService:
@classmethod
def correlate_all_message_instances(cls) -> None:
"""Look at ALL the Send and Receive Messages and attempt to find correlations."""
message_instances_send = MessageInstanceModel.query.filter_by(
message_type="send", status="ready"
).all()
message_instances_send = MessageInstanceModel.query.filter_by(message_type="send", status="ready").all()

for message_instance_send in message_instances_send:
cls.correlate_send_message(message_instance_send)

@@ -150,11 +135,9 @@ class MessageService:
message_instance_receive: MessageInstanceModel,
) -> ProcessInstanceModel:
"""Process_message_receive."""
process_instance_receive: ProcessInstanceModel = (
ProcessInstanceModel.query.filter_by(
id=message_instance_receive.process_instance_id
).first()
)
process_instance_receive: ProcessInstanceModel = ProcessInstanceModel.query.filter_by(
id=message_instance_receive.process_instance_id
).first()
if process_instance_receive is None:
raise MessageServiceError(
(

@@ -176,9 +159,7 @@ class MessageService:
) -> None:
"""process_message_receive."""
processor_receive = ProcessInstanceProcessor(process_instance_receive)
processor_receive.bpmn_process_instance.catch_bpmn_message(
message_model_name, message_payload
)
processor_receive.bpmn_process_instance.catch_bpmn_message(message_model_name, message_payload)
processor_receive.do_engine_steps(save=True)
message_instance_receive.status = MessageStatuses.completed.value
db.session.add(message_instance_receive)


@@ -35,9 +35,7 @@ class ProcessInstanceLockService:
return f"{ctx['domain']}:{ctx['uuid']}:{ctx['thread_id']}"

@classmethod
def lock(
cls, process_instance_id: int, queue_entry: ProcessInstanceQueueModel
) -> None:
def lock(cls, process_instance_id: int, queue_entry: ProcessInstanceQueueModel) -> None:
ctx = cls.get_thread_local_locking_context()
ctx["locks"][process_instance_id] = queue_entry

@@ -55,9 +53,7 @@ class ProcessInstanceLockService:
return ctx["locks"].pop(process_instance_id) # type: ignore

@classmethod
def try_unlock(
cls, process_instance_id: int
) -> Optional[ProcessInstanceQueueModel]:
def try_unlock(cls, process_instance_id: int) -> Optional[ProcessInstanceQueueModel]:
ctx = cls.get_thread_local_locking_context()
return ctx["locks"].pop(process_instance_id, None) # type: ignore

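The lock-service methods reformatted above revolve around a per-thread dict of held locks; a stripped-down sketch of that idea, with made-up names and none of the model classes, looks like this:

import threading
from typing import Any, Dict, Optional

_thread_local = threading.local()


def locking_context() -> Dict[str, Any]:
    # Each thread lazily gets its own dict of process-instance locks.
    if not hasattr(_thread_local, "ctx"):
        _thread_local.ctx = {"locks": {}}
    return _thread_local.ctx


def try_unlock(process_instance_id: int) -> Optional[Any]:
    # pop() with a default returns None instead of raising when no lock is held,
    # which is the difference between unlock and try_unlock above.
    return locking_context()["locks"].pop(process_instance_id, None)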
@@ -171,9 +171,7 @@ class BoxedTaskDataBasedScriptEngineEnvironment(BoxedTaskDataEnvironment): # ty
super().execute(script, context, external_methods)
self._last_result = context

def user_defined_state(
self, external_methods: Optional[Dict[str, Any]] = None
) -> Dict[str, Any]:
def user_defined_state(self, external_methods: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
return {}

def last_result(self) -> Dict[str, Any]:

@@ -201,9 +199,7 @@ class NonTaskDataBasedScriptEngineEnvironment(BasePythonScriptEngineEnvironment)
def __init__(self, environment_globals: Dict[str, Any]):
"""NonTaskDataBasedScriptEngineEnvironment."""
self.state: Dict[str, Any] = {}
self.non_user_defined_keys = set(
[*environment_globals.keys()] + ["__builtins__"]
)
self.non_user_defined_keys = set([*environment_globals.keys()] + ["__builtins__"])
super().__init__(environment_globals)

def evaluate(

@@ -249,18 +245,12 @@ class NonTaskDataBasedScriptEngineEnvironment(BasePythonScriptEngineEnvironment)
# the state will be removed later once the task is completed.
context.update(self.state)

def user_defined_state(
self, external_methods: Optional[Dict[str, Any]] = None
) -> Dict[str, Any]:
def user_defined_state(self, external_methods: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
keys_to_filter = self.non_user_defined_keys
if external_methods is not None:
keys_to_filter |= set(external_methods.keys())

return {
k: v
for k, v in self.state.items()
if k not in keys_to_filter and not callable(v)
}
return {k: v for k, v in self.state.items() if k not in keys_to_filter and not callable(v)}

def last_result(self) -> Dict[str, Any]:
return {k: v for k, v in self.state.items()}

@@ -286,9 +276,7 @@ class NonTaskDataBasedScriptEngineEnvironment(BasePythonScriptEngineEnvironment)
state_keys_to_remove = state_keys - task_data_keys
task_data_keys_to_keep = task_data_keys - state_keys

self.state = {
k: v for k, v in self.state.items() if k not in state_keys_to_remove
}
self.state = {k: v for k, v in self.state.items() if k not in state_keys_to_remove}
task.data = {k: v for k, v in task.data.items() if k in task_data_keys_to_keep}

if hasattr(task.task_spec, "_result_variable"):

@@ -380,20 +368,16 @@ class CustomBpmnScriptEngine(PythonScriptEngine): # type: ignore
except Exception as exception:
if task is None:
raise WorkflowException(
"Error evaluating expression: '%s', %s"
% (expression, str(exception)),
f"Error evaluating expression: '{expression}', {str(exception)}",
) from exception
else:
raise WorkflowTaskException(
"Error evaluating expression '%s', %s"
% (expression, str(exception)),
f"Error evaluating expression '{expression}', {str(exception)}",
task=task,
exception=exception,
) from exception

def execute(
self, task: SpiffTask, script: str, external_methods: Any = None
) -> None:
def execute(self, task: SpiffTask, script: str, external_methods: Any = None) -> None:
"""Execute."""
try:
methods = self.__get_augment_methods(task)

@@ -412,14 +396,10 @@ class CustomBpmnScriptEngine(PythonScriptEngine): # type: ignore
task_data: Dict[str, Any],
) -> Any:
"""CallService."""
return ServiceTaskDelegate.call_connector(
operation_name, operation_params, task_data
)
return ServiceTaskDelegate.call_connector(operation_name, operation_params, task_data)


IdToBpmnProcessSpecMapping = NewType(
"IdToBpmnProcessSpecMapping", dict[str, BpmnProcessSpec]
)
IdToBpmnProcessSpecMapping = NewType("IdToBpmnProcessSpecMapping", dict[str, BpmnProcessSpec])


class ProcessInstanceProcessor:

@@ -428,9 +408,7 @@ class ProcessInstanceProcessor:
_script_engine = CustomBpmnScriptEngine()
SERIALIZER_VERSION = "1.0-spiffworkflow-backend"

wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter(
SPIFF_SPEC_CONFIG
)
wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter(SPIFF_SPEC_CONFIG)
_serializer = BpmnWorkflowSerializer(wf_spec_converter, version=SERIALIZER_VERSION)
_event_serializer = EventBasedGatewayConverter(wf_spec_converter)

@@ -440,9 +418,7 @@ class ProcessInstanceProcessor:
# __init__ calls these helpers:
# * get_spec, which returns a spec and any subprocesses (as IdToBpmnProcessSpecMapping dict)
# * __get_bpmn_process_instance, which takes spec and subprocesses and instantiates and returns a BpmnWorkflow
def __init__(
self, process_instance_model: ProcessInstanceModel, validate_only: bool = False
) -> None:
def __init__(self, process_instance_model: ProcessInstanceModel, validate_only: bool = False) -> None:
"""Create a Workflow Processor based on the serialized information available in the process_instance model."""
tld = current_app.config["THREAD_LOCAL_DATA"]
tld.process_instance_id = process_instance_model.id

@@ -476,9 +452,7 @@ class ProcessInstanceProcessor:
)

self.process_model_identifier = process_instance_model.process_model_identifier
self.process_model_display_name = (
process_instance_model.process_model_display_name
)
self.process_model_display_name = process_instance_model.process_model_display_name

try:
(

@@ -496,10 +470,7 @@ class ProcessInstanceProcessor:
except MissingSpecError as ke:
raise ApiError(
error_code="unexpected_process_instance_structure",
message=(
"Failed to deserialize process_instance"
" '%s' due to a mis-placed or missing task '%s'"
)
message="Failed to deserialize process_instance '%s' due to a mis-placed or missing task '%s'"
% (self.process_model_identifier, str(ke)),
) from ke

@@ -508,45 +479,32 @@ class ProcessInstanceProcessor:
cls, process_model_identifier: str
) -> Tuple[BpmnProcessSpec, IdToBpmnProcessSpecMapping]:
"""Get_process_model_and_subprocesses."""
process_model_info = ProcessModelService.get_process_model(
process_model_identifier
)
process_model_info = ProcessModelService.get_process_model(process_model_identifier)
if process_model_info is None:
raise (
ApiError(
"process_model_not_found",
(
"The given process model was not found:"
f" {process_model_identifier}."
),
f"The given process model was not found: {process_model_identifier}.",
)
)
spec_files = SpecFileService.get_files(process_model_info)
return cls.get_spec(spec_files, process_model_info)

@classmethod
def get_bpmn_process_instance_from_process_model(
cls, process_model_identifier: str
) -> BpmnWorkflow:
def get_bpmn_process_instance_from_process_model(cls, process_model_identifier: str) -> BpmnWorkflow:
"""Get_all_bpmn_process_identifiers_for_process_model."""
(bpmn_process_spec, subprocesses) = cls.get_process_model_and_subprocesses(
process_model_identifier,
)
return cls.get_bpmn_process_instance_from_workflow_spec(
bpmn_process_spec, subprocesses
)
return cls.get_bpmn_process_instance_from_workflow_spec(bpmn_process_spec, subprocesses)

@staticmethod
def set_script_engine(bpmn_process_instance: BpmnWorkflow) -> None:
ProcessInstanceProcessor._script_engine.environment.restore_state(
bpmn_process_instance
)
ProcessInstanceProcessor._script_engine.environment.restore_state(bpmn_process_instance)
bpmn_process_instance.script_engine = ProcessInstanceProcessor._script_engine

def preserve_script_engine_state(self) -> None:
ProcessInstanceProcessor._script_engine.environment.preserve_state(
self.bpmn_process_instance
)
ProcessInstanceProcessor._script_engine.environment.preserve_state(self.bpmn_process_instance)

@classmethod
def _update_bpmn_definition_mappings(

@@ -555,16 +513,11 @@ class ProcessInstanceProcessor:
bpmn_process_definition_identifier: str,
task_definition: TaskDefinitionModel,
) -> None:
if (
bpmn_process_definition_identifier
not in bpmn_definition_to_task_definitions_mappings
):
bpmn_definition_to_task_definitions_mappings[
bpmn_process_definition_identifier
] = {}
bpmn_definition_to_task_definitions_mappings[
bpmn_process_definition_identifier
][task_definition.bpmn_identifier] = task_definition
if bpmn_process_definition_identifier not in bpmn_definition_to_task_definitions_mappings:
bpmn_definition_to_task_definitions_mappings[bpmn_process_definition_identifier] = {}
bpmn_definition_to_task_definitions_mappings[bpmn_process_definition_identifier][
task_definition.bpmn_identifier
] = task_definition

@classmethod
def _get_definition_dict_for_bpmn_process_definition(

@@ -608,18 +561,14 @@ class ProcessInstanceProcessor:

bpmn_subprocess_definition_bpmn_identifiers = {}
for bpmn_subprocess_definition in bpmn_process_subprocess_definitions:
bpmn_process_definition_dict: dict = (
bpmn_subprocess_definition.properties_json
)
bpmn_process_definition_dict: dict = bpmn_subprocess_definition.properties_json
spiff_bpmn_process_dict["subprocess_specs"][
bpmn_subprocess_definition.bpmn_identifier
] = bpmn_process_definition_dict
spiff_bpmn_process_dict["subprocess_specs"][
spiff_bpmn_process_dict["subprocess_specs"][bpmn_subprocess_definition.bpmn_identifier]["task_specs"] = {}
bpmn_subprocess_definition_bpmn_identifiers[bpmn_subprocess_definition.id] = (
bpmn_subprocess_definition.bpmn_identifier
]["task_specs"] = {}
bpmn_subprocess_definition_bpmn_identifiers[
bpmn_subprocess_definition.id
] = bpmn_subprocess_definition.bpmn_identifier
)

task_definitions = TaskDefinitionModel.query.filter(
TaskDefinitionModel.bpmn_process_definition_id.in_( # type: ignore

@@ -627,29 +576,21 @@ class ProcessInstanceProcessor:
)
).all()
for task_definition in task_definitions:
bpmn_subprocess_definition_bpmn_identifier = (
bpmn_subprocess_definition_bpmn_identifiers[
task_definition.bpmn_process_definition_id
]
)
bpmn_subprocess_definition_bpmn_identifier = bpmn_subprocess_definition_bpmn_identifiers[
task_definition.bpmn_process_definition_id
]
cls._update_bpmn_definition_mappings(
bpmn_definition_to_task_definitions_mappings,
bpmn_subprocess_definition_bpmn_identifier,
task_definition,
)
spiff_bpmn_process_dict["subprocess_specs"][
bpmn_subprocess_definition_bpmn_identifier
]["task_specs"][
spiff_bpmn_process_dict["subprocess_specs"][bpmn_subprocess_definition_bpmn_identifier]["task_specs"][
task_definition.bpmn_identifier
] = task_definition.properties_json

@classmethod
def _get_bpmn_process_dict(
cls, bpmn_process: BpmnProcessModel, get_tasks: bool = False
) -> dict:
json_data = JsonDataModel.query.filter_by(
hash=bpmn_process.json_data_hash
).first()
def _get_bpmn_process_dict(cls, bpmn_process: BpmnProcessModel, get_tasks: bool = False) -> dict:
json_data = JsonDataModel.query.filter_by(hash=bpmn_process.json_data_hash).first()
bpmn_process_dict = {"data": json_data.data, "tasks": {}}
bpmn_process_dict.update(bpmn_process.properties_json)
if get_tasks:

@@ -674,12 +615,8 @@ class ProcessInstanceProcessor:
for task in tasks:
tasks_dict = spiff_bpmn_process_dict["tasks"]
if bpmn_subprocess_id_to_guid_mappings:
bpmn_subprocess_guid = bpmn_subprocess_id_to_guid_mappings[
task.bpmn_process_id
]
tasks_dict = spiff_bpmn_process_dict["subprocesses"][
bpmn_subprocess_guid
]["tasks"]
bpmn_subprocess_guid = bpmn_subprocess_id_to_guid_mappings[task.bpmn_process_id]
tasks_dict = spiff_bpmn_process_dict["subprocesses"][bpmn_subprocess_guid]["tasks"]
tasks_dict[task.guid] = task.properties_json
tasks_dict[task.guid]["data"] = json_data_mappings[task.json_data_hash]

@@ -700,11 +637,9 @@ class ProcessInstanceProcessor:
}
bpmn_process_definition = process_instance_model.bpmn_process_definition
if bpmn_process_definition is not None:
spiff_bpmn_process_dict["spec"] = (
cls._get_definition_dict_for_bpmn_process_definition(
bpmn_process_definition,
bpmn_definition_to_task_definitions_mappings,
)
spiff_bpmn_process_dict["spec"] = cls._get_definition_dict_for_bpmn_process_definition(
bpmn_process_definition,
bpmn_definition_to_task_definitions_mappings,
)
cls._set_definition_dict_for_bpmn_subprocess_definitions(
bpmn_process_definition,
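The nested-mapping update in the -555 hunk above is easier to follow outside the diff; a small sketch of the same two-level insert, with generic names rather than the real ones:

from typing import Dict


def update_mapping(mappings: Dict[str, Dict[str, object]], process_id: str, task_id: str, value: object) -> None:
    # Create the inner dict on first use, then assign through both keys on one line.
    if process_id not in mappings:
        mappings[process_id] = {}
    mappings[process_id][task_id] = value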
@@ -714,32 +649,20 @@ class ProcessInstanceProcessor:

bpmn_process = process_instance_model.bpmn_process
if bpmn_process is not None:
single_bpmn_process_dict = cls._get_bpmn_process_dict(
bpmn_process, get_tasks=True
)
single_bpmn_process_dict = cls._get_bpmn_process_dict(bpmn_process, get_tasks=True)
spiff_bpmn_process_dict.update(single_bpmn_process_dict)

bpmn_subprocesses = BpmnProcessModel.query.filter_by(
parent_process_id=bpmn_process.id
).all()
bpmn_subprocesses = BpmnProcessModel.query.filter_by(parent_process_id=bpmn_process.id).all()
bpmn_subprocess_id_to_guid_mappings = {}
for bpmn_subprocess in bpmn_subprocesses:
bpmn_subprocess_id_to_guid_mappings[bpmn_subprocess.id] = (
bpmn_subprocess.guid
)
single_bpmn_process_dict = cls._get_bpmn_process_dict(
bpmn_subprocess
)
spiff_bpmn_process_dict["subprocesses"][
bpmn_subprocess.guid
] = single_bpmn_process_dict
bpmn_subprocess_id_to_guid_mappings[bpmn_subprocess.id] = bpmn_subprocess.guid
single_bpmn_process_dict = cls._get_bpmn_process_dict(bpmn_subprocess)
spiff_bpmn_process_dict["subprocesses"][bpmn_subprocess.guid] = single_bpmn_process_dict

tasks = TaskModel.query.filter(
TaskModel.bpmn_process_id.in_(bpmn_subprocess_id_to_guid_mappings.keys()) # type: ignore
).all()
cls._get_tasks_dict(
tasks, spiff_bpmn_process_dict, bpmn_subprocess_id_to_guid_mappings
)
cls._get_tasks_dict(tasks, spiff_bpmn_process_dict, bpmn_subprocess_id_to_guid_mappings)

return spiff_bpmn_process_dict

@@ -786,17 +709,11 @@ class ProcessInstanceProcessor:
spiff_logger.setLevel(logging.WARNING)

try:
full_bpmn_process_dict = (
ProcessInstanceProcessor._get_full_bpmn_process_dict(
process_instance_model,
bpmn_definition_to_task_definitions_mappings,
)
)
bpmn_process_instance = (
ProcessInstanceProcessor._serializer.workflow_from_dict(
full_bpmn_process_dict
)
full_bpmn_process_dict = ProcessInstanceProcessor._get_full_bpmn_process_dict(
process_instance_model,
bpmn_definition_to_task_definitions_mappings,
)
bpmn_process_instance = ProcessInstanceProcessor._serializer.workflow_from_dict(full_bpmn_process_dict)
except Exception as err:
raise err
finally:

@@ -804,14 +721,10 @@ class ProcessInstanceProcessor:

ProcessInstanceProcessor.set_script_engine(bpmn_process_instance)
else:
bpmn_process_instance = (
ProcessInstanceProcessor.get_bpmn_process_instance_from_workflow_spec(
spec, subprocesses
)
bpmn_process_instance = ProcessInstanceProcessor.get_bpmn_process_instance_from_workflow_spec(
spec, subprocesses
)
bpmn_process_instance.data[
ProcessInstanceProcessor.VALIDATION_PROCESS_KEY
] = validate_only
bpmn_process_instance.data[ProcessInstanceProcessor.VALIDATION_PROCESS_KEY] = validate_only
return (
bpmn_process_instance,
full_bpmn_process_dict,

@@ -820,22 +733,16 @@ class ProcessInstanceProcessor:

def slam_in_data(self, data: dict) -> None:
"""Slam_in_data."""
self.bpmn_process_instance.data = DeepMerge.merge(
self.bpmn_process_instance.data, data
)
self.bpmn_process_instance.data = DeepMerge.merge(self.bpmn_process_instance.data, data)

self.save()

def raise_if_no_potential_owners(
self, potential_owner_ids: list[int], message: str
) -> None:
def raise_if_no_potential_owners(self, potential_owner_ids: list[int], message: str) -> None:
"""Raise_if_no_potential_owners."""
if not potential_owner_ids:
raise NoPotentialOwnersForTaskError(message)

def get_potential_owner_ids_from_task(
self, task: SpiffTask
) -> PotentialOwnerIdList:
def get_potential_owner_ids_from_task(self, task: SpiffTask) -> PotentialOwnerIdList:
"""Get_potential_owner_ids_from_task."""
task_spec = task.task_spec
task_lane = "process_initiator"

@@ -862,14 +769,8 @@ class ProcessInstanceProcessor:
else:
group_model = GroupModel.query.filter_by(identifier=task_lane).first()
if group_model is None:
raise (
NoPotentialOwnersForTaskError(
f"Could not find a group with name matching lane: {task_lane}"
)
)
potential_owner_ids = [
i.user_id for i in group_model.user_group_assignments
]
raise (NoPotentialOwnersForTaskError(f"Could not find a group with name matching lane: {task_lane}"))
potential_owner_ids = [i.user_id for i in group_model.user_group_assignments]
lane_assignment_id = group_model.id
self.raise_if_no_potential_owners(
potential_owner_ids,

@@ -961,14 +862,10 @@ class ProcessInstanceProcessor:
for task_name, _task_spec in bpmn_definition_dict["spec"]["task_specs"].items():
processes[bpmn_definition_dict["spec"]["name"]].append(task_name)
if "subprocess_specs" in bpmn_definition_dict:
for subprocess_name, subprocess_details in bpmn_definition_dict[
"subprocess_specs"
].items():
for subprocess_name, subprocess_details in bpmn_definition_dict["subprocess_specs"].items():
processes[subprocess_name] = []
if "task_specs" in subprocess_details:
for task_name, _task_spec in subprocess_details[
"task_specs"
].items():
for task_name, _task_spec in subprocess_details["task_specs"].items():
processes[subprocess_name].append(task_name)
return processes

@@ -987,11 +884,7 @@ class ProcessInstanceProcessor:

for process_name, task_spec_names in processes.items():
if task_name in task_spec_names:
process_name_to_return = (
self.find_process_model_process_name_by_task_name(
process_name, processes
)
)
process_name_to_return = self.find_process_model_process_name_by_task_name(process_name, processes)
return process_name_to_return

#################################################################

@@ -1007,9 +900,7 @@ class ProcessInstanceProcessor:
bpmn_definition_dict = self.full_bpmn_process_dict
spiff_task_json = bpmn_definition_dict["spec"]["task_specs"] or {}
if "subprocess_specs" in bpmn_definition_dict:
for _subprocess_name, subprocess_details in bpmn_definition_dict[
"subprocess_specs"
].items():
for _subprocess_name, subprocess_details in bpmn_definition_dict["subprocess_specs"].items():
if "task_specs" in subprocess_details:
spiff_task_json = spiff_task_json | subprocess_details["task_specs"]
return spiff_task_json

@@ -1035,16 +926,12 @@ class ProcessInstanceProcessor:
subprocesses_by_child_task_ids = {}
task_typename_by_task_id = {}
if "subprocesses" in process_instance_data_dict:
for subprocess_id, subprocess_details in process_instance_data_dict[
"subprocesses"
].items():
for subprocess_id, subprocess_details in process_instance_data_dict["subprocesses"].items():
for task_id, task_details in subprocess_details["tasks"].items():
subprocesses_by_child_task_ids[task_id] = subprocess_id
task_name = task_details["task_spec"]
if task_name in spiff_task_json:
task_typename_by_task_id[task_id] = spiff_task_json[task_name][
"typename"
]
task_typename_by_task_id[task_id] = spiff_task_json[task_name]["typename"]
return (subprocesses_by_child_task_ids, task_typename_by_task_id)

def get_highest_level_calling_subprocesses_by_child_task_ids(

@@ -1060,15 +947,10 @@ class ProcessInstanceProcessor:
if current_subprocess_id_for_task in task_typename_by_task_id:
# a call activity is like the top-level subprocess since it is the calling subprocess
# according to spiff and the top-level calling subprocess is really what we care about
if (
task_typename_by_task_id[current_subprocess_id_for_task]
== "CallActivity"
):
if task_typename_by_task_id[current_subprocess_id_for_task] == "CallActivity":
continue

subprocesses_by_child_task_ids[task_id] = (
subprocesses_by_child_task_ids[subprocess_id]
)
subprocesses_by_child_task_ids[task_id] = subprocesses_by_child_task_ids[subprocess_id]
self.get_highest_level_calling_subprocesses_by_child_task_ids(
subprocesses_by_child_task_ids, task_typename_by_task_id
)
@@ -1081,12 +963,10 @@ class ProcessInstanceProcessor:
store_bpmn_definition_mappings: bool = False,
) -> BpmnProcessDefinitionModel:
process_bpmn_identifier = process_bpmn_properties["name"]
new_hash_digest = sha256(
json.dumps(process_bpmn_properties, sort_keys=True).encode("utf8")
).hexdigest()
bpmn_process_definition: Optional[BpmnProcessDefinitionModel] = (
BpmnProcessDefinitionModel.query.filter_by(hash=new_hash_digest).first()
)
new_hash_digest = sha256(json.dumps(process_bpmn_properties, sort_keys=True).encode("utf8")).hexdigest()
bpmn_process_definition: Optional[BpmnProcessDefinitionModel] = BpmnProcessDefinitionModel.query.filter_by(
hash=new_hash_digest
).first()

if bpmn_process_definition is None:
task_specs = process_bpmn_properties.pop("task_specs")

@@ -1125,12 +1005,10 @@ class ProcessInstanceProcessor:
)

if bpmn_process_definition_parent is not None:
bpmn_process_definition_relationship = (
BpmnProcessDefinitionRelationshipModel.query.filter_by(
bpmn_process_definition_parent_id=bpmn_process_definition_parent.id,
bpmn_process_definition_child_id=bpmn_process_definition.id,
).first()
)
bpmn_process_definition_relationship = BpmnProcessDefinitionRelationshipModel.query.filter_by(
bpmn_process_definition_parent_id=bpmn_process_definition_parent.id,
bpmn_process_definition_child_id=bpmn_process_definition.id,
).first()
if bpmn_process_definition_relationship is None:
bpmn_process_definition_relationship = BpmnProcessDefinitionRelationshipModel(
bpmn_process_definition_parent_id=bpmn_process_definition_parent.id,

@@ -1141,9 +1019,7 @@ class ProcessInstanceProcessor:

def _add_bpmn_process_definitions(self, bpmn_spec_dict: dict) -> None:
# store only if mappings is currently empty. this also would mean this is a new instance that has never saved before
store_bpmn_definition_mappings = (
not self.bpmn_definition_to_task_definitions_mappings
)
store_bpmn_definition_mappings = not self.bpmn_definition_to_task_definitions_mappings
bpmn_process_definition_parent = self._store_bpmn_process_definition(
bpmn_spec_dict["spec"],
store_bpmn_definition_mappings=store_bpmn_definition_mappings,

@@ -1154,9 +1030,7 @@ class ProcessInstanceProcessor:
bpmn_process_definition_parent,
store_bpmn_definition_mappings=store_bpmn_definition_mappings,
)
self.process_instance_model.bpmn_process_definition = (
bpmn_process_definition_parent
)
self.process_instance_model.bpmn_process_definition = bpmn_process_definition_parent

def _add_bpmn_json_records(self) -> None:
"""Adds serialized_bpmn_definition and process_instance_data records to the db session.

@@ -1178,14 +1052,12 @@ class ProcessInstanceProcessor:
self._add_bpmn_process_definitions(bpmn_spec_dict)

subprocesses = process_instance_data_dict.pop("subprocesses")
bpmn_process_parent, new_task_models, new_json_data_dicts = (
TaskService.add_bpmn_process(
bpmn_process_dict=process_instance_data_dict,
process_instance=self.process_instance_model,
bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings,
spiff_workflow=self.bpmn_process_instance,
serializer=self._serializer,
)
bpmn_process_parent, new_task_models, new_json_data_dicts = TaskService.add_bpmn_process(
bpmn_process_dict=process_instance_data_dict,
process_instance=self.process_instance_model,
bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings,
spiff_workflow=self.bpmn_process_instance,
serializer=self._serializer,
)
for subprocess_task_id, subprocess_properties in subprocesses.items():
(

@@ -1216,13 +1088,10 @@ class ProcessInstanceProcessor:
user_tasks = list(self.get_all_user_tasks())
self.process_instance_model.status = self.get_status().value
current_app.logger.debug(
f"the_status: {self.process_instance_model.status} for instance"
f" {self.process_instance_model.id}"
f"the_status: {self.process_instance_model.status} for instance {self.process_instance_model.id}"
)
self.process_instance_model.total_tasks = len(user_tasks)
self.process_instance_model.completed_tasks = sum(
1 for t in user_tasks if t.state in complete_states
)
self.process_instance_model.completed_tasks = sum(1 for t in user_tasks if t.state in complete_states)

if self.process_instance_model.start_in_seconds is None:
self.process_instance_model.start_in_seconds = round(time.time())

@@ -1252,9 +1121,7 @@ class ProcessInstanceProcessor:
# filter out non-usertasks
task_spec = ready_or_waiting_task.task_spec
if not self.bpmn_process_instance._is_engine_task(task_spec):
potential_owner_hash = self.get_potential_owner_ids_from_task(
ready_or_waiting_task
)
potential_owner_hash = self.get_potential_owner_ids_from_task(ready_or_waiting_task)
extensions = task_spec.extensions

# in the xml, it's the id attribute. this identifies the process where the activity lives.

@@ -1292,21 +1159,15 @@ class ProcessInstanceProcessor:
)
db.session.add(human_task)

for potential_owner_id in potential_owner_hash[
"potential_owner_ids"
]:
human_task_user = HumanTaskUserModel(
user_id=potential_owner_id, human_task=human_task
)
for potential_owner_id in potential_owner_hash["potential_owner_ids"]:
human_task_user = HumanTaskUserModel(user_id=potential_owner_id, human_task=human_task)
db.session.add(human_task_user)

self.increment_spiff_step()
spiff_step_detail_mapping = self.spiff_step_details_mapping(
spiff_task=ready_or_waiting_task, start_in_seconds=time.time()
)
spiff_step_detail = SpiffStepDetailsModel(
**spiff_step_detail_mapping
)
spiff_step_detail = SpiffStepDetailsModel(**spiff_step_detail_mapping)
db.session.add(spiff_step_detail)
db.session.commit()
# self.log_spiff_step_details(spiff_step_detail_mapping)

@@ -1330,8 +1191,7 @@ class ProcessInstanceProcessor:
if payload is not None:
event_definition.payload = payload
current_app.logger.info(
f"Event of type {event_definition.event_type} sent to process instance"
f" {self.process_instance_model.id}"
f"Event of type {event_definition.event_type} sent to process instance {self.process_instance_model.id}"
)
try:
self.bpmn_process_instance.catch(event_definition)

@@ -1370,18 +1230,14 @@ class ProcessInstanceProcessor:
spiff_task.complete()
else:
spiff_logger = logging.getLogger("spiff")
spiff_logger.info(
f"Skipped task {spiff_task.task_spec.name}", extra=spiff_task.log_info()
)
spiff_logger.info(f"Skipped task {spiff_task.task_spec.name}", extra=spiff_task.log_info())
spiff_task._set_state(TaskState.COMPLETED)
for child in spiff_task.children:
child.task_spec._update(child)
spiff_task.workflow.last_task = spiff_task

if isinstance(spiff_task.task_spec, EndEvent):
for task in self.bpmn_process_instance.get_tasks(
TaskState.DEFINITE_MASK, workflow=spiff_task.workflow
):
for task in self.bpmn_process_instance.get_tasks(TaskState.DEFINITE_MASK, workflow=spiff_task.workflow):
task.complete()

# A subworkflow task will become ready when its workflow is complete. Engine steps would normally

@@ -1407,8 +1263,7 @@ class ProcessInstanceProcessor:
step_detail = (
db.session.query(SpiffStepDetailsModel)
.filter(
SpiffStepDetailsModel.process_instance_id
== self.process_instance_model.id,
SpiffStepDetailsModel.process_instance_id == self.process_instance_model.id,
SpiffStepDetailsModel.spiff_step == spiff_step,
)
.first()

@@ -1454,15 +1309,11 @@ class ProcessInstanceProcessor:
process_models = ProcessModelService.get_process_models(recursive=True)
for process_model in process_models:
try:
refs = SpecFileService.reference_map(
SpecFileService.get_references_for_process(process_model)
)
refs = SpecFileService.reference_map(SpecFileService.get_references_for_process(process_model))
bpmn_process_identifiers = refs.keys()
if bpmn_process_identifier in bpmn_process_identifiers:
SpecFileService.update_process_cache(refs[bpmn_process_identifier])
return FileSystemService.full_path_to_process_model_file(
process_model
)
return FileSystemService.full_path_to_process_model_file(process_model)
except Exception:
current_app.logger.warning("Failed to parse process ", process_model.id)
return None

@@ -1474,19 +1325,14 @@ class ProcessInstanceProcessor:
"""Bpmn_file_full_path_from_bpmn_process_identifier."""
if bpmn_process_identifier is None:
raise ValueError(
"bpmn_file_full_path_from_bpmn_process_identifier:"
" bpmn_process_identifier is unexpectedly None"
"bpmn_file_full_path_from_bpmn_process_identifier: bpmn_process_identifier is unexpectedly None"
)

spec_reference = SpecReferenceCache.query.filter_by(
identifier=bpmn_process_identifier, type="process"
).first()
spec_reference = SpecReferenceCache.query.filter_by(identifier=bpmn_process_identifier, type="process").first()
bpmn_file_full_path = None
if spec_reference is None:
bpmn_file_full_path = (
ProcessInstanceProcessor.backfill_missing_spec_reference_records(
bpmn_process_identifier
)
bpmn_file_full_path = ProcessInstanceProcessor.backfill_missing_spec_reference_records(
bpmn_process_identifier
)
else:
bpmn_file_full_path = os.path.join(
@@ -1497,10 +1343,7 @@ class ProcessInstanceProcessor:
raise (
ApiError(
error_code="could_not_find_bpmn_process_identifier",
message=(
"Could not find the the given bpmn process identifier from any"
" sources: %s"
)
message="Could not find the the given bpmn process identifier from any sources: %s"
% bpmn_process_identifier,
)
)

@@ -1532,9 +1375,7 @@ class ProcessInstanceProcessor:
bpmn_process_identifier
)
new_bpmn_files.add(new_bpmn_file_full_path)
dmn_file_glob = os.path.join(
os.path.dirname(new_bpmn_file_full_path), "*.dmn"
)
dmn_file_glob = os.path.join(os.path.dirname(new_bpmn_file_full_path), "*.dmn")
parser.add_dmn_files_by_glob(dmn_file_glob)
processed_identifiers.add(bpmn_process_identifier)

@@ -1565,36 +1406,24 @@ class ProcessInstanceProcessor:
error_code="invalid_xml",
message=f"'{file.name}' is not a valid xml file." + str(xse),
) from xse
if (
process_model_info.primary_process_id is None
or process_model_info.primary_process_id == ""
):
if process_model_info.primary_process_id is None or process_model_info.primary_process_id == "":
raise (
ApiError(
error_code="no_primary_bpmn_error",
message=(
"There is no primary BPMN process id defined for"
" process_model %s"
)
% process_model_info.id,
message="There is no primary BPMN process id defined for process_model %s" % process_model_info.id,
)
)
ProcessInstanceProcessor.update_spiff_parser_with_all_process_dependency_files(
parser
)
ProcessInstanceProcessor.update_spiff_parser_with_all_process_dependency_files(parser)

try:
bpmn_process_spec = parser.get_spec(process_model_info.primary_process_id)

# returns a dict of {process_id: bpmn_process_spec}, otherwise known as an IdToBpmnProcessSpecMapping
subprocesses = parser.get_subprocess_specs(
process_model_info.primary_process_id
)
subprocesses = parser.get_subprocess_specs(process_model_info.primary_process_id)
except ValidationException as ve:
raise ApiError(
error_code="process_instance_validation_error",
message="Failed to parse the Workflow Specification. "
+ "Error is '%s.'" % str(ve),
message="Failed to parse the Workflow Specification. " + "Error is '%s.'" % str(ve),
file_name=ve.file_name,
task_name=ve.name,
task_id=ve.id,

@@ -1655,9 +1484,7 @@ class ProcessInstanceProcessor:
def queue_waiting_receive_messages(self) -> None:
"""Queue_waiting_receive_messages."""
waiting_events = self.bpmn_process_instance.waiting_events()
waiting_message_events = filter(
lambda e: e["event_type"] == "Message", waiting_events
)
waiting_message_events = filter(lambda e: e["event_type"] == "Message", waiting_events)

for event in waiting_message_events:
# Ensure we are only creating one message instance for each waiting message

@@ -1705,15 +1532,11 @@ class ProcessInstanceProcessor:
) -> None:
# NOTE: To avoid saving spiff step details, just comment out this function and the step_delegate and
# set the TaskModelSavingDelegate's secondary_engine_step_delegate to None.
def spiff_step_details_mapping_builder(
task: SpiffTask, start: float, end: float
) -> dict:
def spiff_step_details_mapping_builder(task: SpiffTask, start: float, end: float) -> dict:
self._script_engine.environment.revise_state_with_task_data(task)
return self.spiff_step_details_mapping(task, start, end)

step_delegate = StepDetailLoggingDelegate(
self.increment_spiff_step, spiff_step_details_mapping_builder
)
step_delegate = StepDetailLoggingDelegate(self.increment_spiff_step, spiff_step_details_mapping_builder)
task_model_delegate = TaskModelSavingDelegate(
secondary_engine_step_delegate=step_delegate,
serializer=self._serializer,

@@ -1722,13 +1545,9 @@ class ProcessInstanceProcessor:
)

if execution_strategy_name is None:
execution_strategy_name = current_app.config[
"SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_WEB"
]
execution_strategy_name = current_app.config["SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_WEB"]

execution_strategy = execution_strategy_named(
execution_strategy_name, task_model_delegate
)
execution_strategy = execution_strategy_named(execution_strategy_name, task_model_delegate)
execution_service = WorkflowExecutionService(
self.bpmn_process_instance,
self.process_instance_model,

@@ -1764,14 +1583,8 @@ class ProcessInstanceProcessor:
raise ApiError.from_workflow_exception("task_error", str(we), we) from we

@classmethod
def get_tasks_with_data(
cls, bpmn_process_instance: BpmnWorkflow
) -> List[SpiffTask]:
return [
task
for task in bpmn_process_instance.get_tasks(TaskState.FINISHED_MASK)
if len(task.data) > 0
]
def get_tasks_with_data(cls, bpmn_process_instance: BpmnWorkflow) -> List[SpiffTask]:
return [task for task in bpmn_process_instance.get_tasks(TaskState.FINISHED_MASK) if len(task.data) > 0]

@classmethod
def get_task_data_size(cls, bpmn_process_instance: BpmnWorkflow) -> int:

@@ -1785,9 +1598,7 @@ class ProcessInstanceProcessor:

@classmethod
def get_python_env_size(cls, bpmn_process_instance: BpmnWorkflow) -> int:
user_defined_state = (
bpmn_process_instance.script_engine.environment.user_defined_state()
)
user_defined_state = bpmn_process_instance.script_engine.environment.user_defined_state()

try:
return len(json.dumps(user_defined_state))

@@ -1832,14 +1643,9 @@ class ProcessInstanceProcessor:

endtasks = []
if self.bpmn_process_instance.is_completed():
for task in SpiffTask.Iterator(
self.bpmn_process_instance.task_tree, TaskState.ANY_MASK
):
for task in SpiffTask.Iterator(self.bpmn_process_instance.task_tree, TaskState.ANY_MASK):
# Assure that we find the end event for this process_instance, and not for any sub-process_instances.
if (
isinstance(task.task_spec, EndEvent)
and task.workflow == self.bpmn_process_instance
):
if isinstance(task.task_spec, EndEvent) and task.workflow == self.bpmn_process_instance:
endtasks.append(task)
if len(endtasks) > 0:
return endtasks[-1]

@@ -1873,10 +1679,7 @@ class ProcessInstanceProcessor:
if task._is_descendant_of(last_user_task):
return task
for task in ready_tasks:
if (
self.bpmn_process_instance.last_task
and task.parent == last_user_task.parent
):
if self.bpmn_process_instance.last_task and task.parent == last_user_task.parent:
return task

return ready_tasks[0]

@@ -1884,9 +1687,7 @@ class ProcessInstanceProcessor:
# If there are no ready tasks, but the thing isn't complete yet, find the first non-complete task
# and return that
next_task = None
for task in SpiffTask.Iterator(
self.bpmn_process_instance.task_tree, TaskState.NOT_FINISHED_MASK
):
for task in SpiffTask.Iterator(self.bpmn_process_instance.task_tree, TaskState.NOT_FINISHED_MASK):
next_task = task
return next_task

@@ -1896,9 +1697,7 @@ class ProcessInstanceProcessor:
user_tasks.reverse()
user_tasks = list(
filter(
lambda task: not self.bpmn_process_instance._is_engine_task(
task.task_spec
),
lambda task: not self.bpmn_process_instance._is_engine_task(task.task_spec),
user_tasks,
)
)
@ -1907,24 +1706,19 @@ class ProcessInstanceProcessor:
|
|||
def get_task_dict_from_spiff_task(self, spiff_task: SpiffTask) -> dict[str, Any]:
|
||||
default_registry = DefaultRegistry()
|
||||
task_data = default_registry.convert(spiff_task.data)
|
||||
python_env = default_registry.convert(
|
||||
self._script_engine.environment.last_result()
|
||||
)
|
||||
python_env = default_registry.convert(self._script_engine.environment.last_result())
|
||||
task_json: Dict[str, Any] = {
|
||||
"task_data": task_data,
|
||||
"python_env": python_env,
|
||||
}
|
||||
return task_json
|
||||
|
||||
def complete_task(
|
||||
self, spiff_task: SpiffTask, human_task: HumanTaskModel, user: UserModel
|
||||
) -> None:
|
||||
def complete_task(self, spiff_task: SpiffTask, human_task: HumanTaskModel, user: UserModel) -> None:
|
||||
"""Complete_task."""
|
||||
task_model = TaskModel.query.filter_by(guid=human_task.task_id).first()
|
||||
if task_model is None:
|
||||
raise TaskNotFoundError(
|
||||
"Cannot find a task with guid"
|
||||
f" {self.process_instance_model.id} and task_id is {human_task.task_id}"
|
||||
f"Cannot find a task with guid {self.process_instance_model.id} and task_id is {human_task.task_id}"
|
||||
)
|
||||
|
||||
task_model.start_in_seconds = time.time()
|
||||
|
@ -1958,16 +1752,10 @@ class ProcessInstanceProcessor:
|
|||
db.session.add(details_model)
|
||||
# #######
|
||||
|
||||
json_data_dict_list = TaskService.update_task_model(
|
||||
task_model, spiff_task, self._serializer
|
||||
)
|
||||
json_data_dict_list = TaskService.update_task_model(task_model, spiff_task, self._serializer)
|
||||
for json_data_dict in json_data_dict_list:
|
||||
if json_data_dict is not None:
|
||||
json_data = (
|
||||
db.session.query(JsonDataModel.id)
|
||||
.filter_by(hash=json_data_dict["hash"])
|
||||
.first()
|
||||
)
|
||||
json_data = db.session.query(JsonDataModel.id).filter_by(hash=json_data_dict["hash"]).first()
|
||||
if json_data is None:
|
||||
json_data = JsonDataModel(**json_data_dict)
|
||||
db.session.add(json_data)
|
||||
|
@ -2021,11 +1809,7 @@ class ProcessInstanceProcessor:
def get_all_user_tasks(self) -> List[SpiffTask]:
"""Get_all_user_tasks."""
all_tasks = self.bpmn_process_instance.get_tasks(TaskState.ANY_MASK)
return [
t
for t in all_tasks
if not self.bpmn_process_instance._is_engine_task(t.task_spec)
]
return [t for t in all_tasks if not self.bpmn_process_instance._is_engine_task(t.task_spec)]

def get_all_completed_tasks(self) -> list[SpiffTask]:
"""Get_all_completed_tasks."""

@ -31,9 +31,7 @@ class ProcessInstanceQueueService:
queue_item = ProcessInstanceLockService.try_unlock(process_instance.id)

if queue_item is None:
queue_item = ProcessInstanceQueueModel(
process_instance_id=process_instance.id
)
queue_item = ProcessInstanceQueueModel(process_instance_id=process_instance.id)

# TODO: configurable params (priority/run_at)
queue_item.run_at_in_seconds = round(time.time())

@ -73,8 +71,7 @@ class ProcessInstanceQueueService:

if queue_entry is None:
raise ProcessInstanceIsNotEnqueuedError(
f"{locked_by} cannot lock process instance {process_instance.id}. It"
" has not been enqueued."
f"{locked_by} cannot lock process instance {process_instance.id}. It has not been enqueued."
)

if queue_entry.locked_by != locked_by:

@ -76,13 +76,9 @@ class ProcessInstanceReportFilter:
if self.has_terminal_status is not None:
d["has_terminal_status"] = str(self.has_terminal_status).lower()
if self.with_tasks_completed_by_me is not None:
d["with_tasks_completed_by_me"] = str(
self.with_tasks_completed_by_me
).lower()
d["with_tasks_completed_by_me"] = str(self.with_tasks_completed_by_me).lower()
if self.with_tasks_assigned_to_my_group is not None:
d["with_tasks_assigned_to_my_group"] = str(
self.with_tasks_assigned_to_my_group
).lower()
d["with_tasks_assigned_to_my_group"] = str(self.with_tasks_assigned_to_my_group).lower()
if self.with_relation_to_me is not None:
d["with_relation_to_me"] = str(self.with_relation_to_me).lower()
if self.process_initiator_username is not None:

@ -177,8 +173,7 @@ class ProcessInstanceReportService:
report_metadata = cls.system_metadata_map(report_identifier)
if report_metadata is None:
raise ProcessInstanceReportNotFoundError(
f"Could not find a report with identifier '{report_identifier}' for"
f" user '{user.username}'"
f"Could not find a report with identifier '{report_identifier}' for user '{user.username}'"
)

process_instance_report = ProcessInstanceReportModel(

@ -190,23 +185,15 @@ class ProcessInstanceReportService:
return process_instance_report # type: ignore

@classmethod
def filter_by_to_dict(
cls, process_instance_report: ProcessInstanceReportModel
) -> dict[str, str]:
def filter_by_to_dict(cls, process_instance_report: ProcessInstanceReportModel) -> dict[str, str]:
"""Filter_by_to_dict."""
metadata = process_instance_report.report_metadata
filter_by = metadata.get("filter_by", [])
filters = {
d["field_name"]: d["field_value"]
for d in filter_by
if "field_name" in d and "field_value" in d
}
filters = {d["field_name"]: d["field_value"] for d in filter_by if "field_name" in d and "field_value" in d}
return filters

@classmethod
def filter_from_metadata(
cls, process_instance_report: ProcessInstanceReportModel
) -> ProcessInstanceReportFilter:
def filter_from_metadata(cls, process_instance_report: ProcessInstanceReportModel) -> ProcessInstanceReportFilter:
"""Filter_from_metadata."""
filters = cls.filter_by_to_dict(process_instance_report)

@ -308,9 +295,7 @@ class ProcessInstanceReportService:
if report_filter_by_list is not None:
report_filter.report_filter_by_list = report_filter_by_list
if with_tasks_assigned_to_my_group is not None:
report_filter.with_tasks_assigned_to_my_group = (
with_tasks_assigned_to_my_group
)
report_filter.with_tasks_assigned_to_my_group = with_tasks_assigned_to_my_group
if with_relation_to_me is not None:
report_filter.with_relation_to_me = with_relation_to_me

@ -328,17 +313,13 @@ class ProcessInstanceReportService:
process_instance_dict = process_instance["ProcessInstanceModel"].serialized
for metadata_column in metadata_columns:
if metadata_column["accessor"] not in process_instance_dict:
process_instance_dict[metadata_column["accessor"]] = (
process_instance[metadata_column["accessor"]]
)
process_instance_dict[metadata_column["accessor"]] = process_instance[metadata_column["accessor"]]

results.append(process_instance_dict)
return results

@classmethod
def get_column_names_for_model(
cls, model: Type[SpiffworkflowBaseDBModel]
) -> list[str]:
def get_column_names_for_model(cls, model: Type[SpiffworkflowBaseDBModel]) -> list[str]:
"""Get_column_names_for_model."""
return [i.name for i in model.__table__.columns]

@ -374,24 +355,17 @@ class ProcessInstanceReportService:
"""Run_process_instance_report."""
process_instance_query = ProcessInstanceModel.query
# Always join that hot user table for good performance at serialization time.
process_instance_query = process_instance_query.options(
selectinload(ProcessInstanceModel.process_initiator)
)
process_instance_query = process_instance_query.options(selectinload(ProcessInstanceModel.process_initiator))

if report_filter.process_model_identifier is not None:
process_model = ProcessModelService.get_process_model(
f"{report_filter.process_model_identifier}",
)

process_instance_query = process_instance_query.filter_by(
process_model_identifier=process_model.id
)
process_instance_query = process_instance_query.filter_by(process_model_identifier=process_model.id)

# this can never happen. obviously the class has the columns it defines. this is just to appease mypy.
if (
ProcessInstanceModel.start_in_seconds is None
or ProcessInstanceModel.end_in_seconds is None
):
if ProcessInstanceModel.start_in_seconds is None or ProcessInstanceModel.end_in_seconds is None:
raise (
ApiError(
error_code="unexpected_condition",

@ -422,9 +396,7 @@ class ProcessInstanceReportService:
)

if report_filter.initiated_by_me is True:
process_instance_query = process_instance_query.filter_by(
process_initiator=user
)
process_instance_query = process_instance_query.filter_by(process_initiator=user)

if report_filter.has_terminal_status is True:
process_instance_query = process_instance_query.filter(

@ -432,24 +404,18 @@ class ProcessInstanceReportService:
)

if report_filter.process_initiator_username is not None:
user = UserModel.query.filter_by(
username=report_filter.process_initiator_username
).first()
user = UserModel.query.filter_by(username=report_filter.process_initiator_username).first()
process_initiator_id = -1
if user:
process_initiator_id = user.id
process_instance_query = process_instance_query.filter_by(
process_initiator_id=process_initiator_id
)
process_instance_query = process_instance_query.filter_by(process_initiator_id=process_initiator_id)

if (
not report_filter.with_tasks_completed_by_me
and not report_filter.with_tasks_assigned_to_my_group
and report_filter.with_relation_to_me is True
):
process_instance_query = process_instance_query.outerjoin(
HumanTaskModel
).outerjoin(
process_instance_query = process_instance_query.outerjoin(HumanTaskModel).outerjoin(
HumanTaskUserModel,
and_(
HumanTaskModel.id == HumanTaskUserModel.human_task_id,

@ -476,37 +442,23 @@ class ProcessInstanceReportService:
)

if report_filter.with_tasks_assigned_to_my_group is True:
group_model_join_conditions = [
GroupModel.id == HumanTaskModel.lane_assignment_id
]
group_model_join_conditions = [GroupModel.id == HumanTaskModel.lane_assignment_id]
if report_filter.user_group_identifier:
group_model_join_conditions.append(
GroupModel.identifier == report_filter.user_group_identifier
)
group_model_join_conditions.append(GroupModel.identifier == report_filter.user_group_identifier)
process_instance_query = process_instance_query.join(HumanTaskModel)
process_instance_query = process_instance_query.join(
GroupModel, and_(*group_model_join_conditions)
)
process_instance_query = process_instance_query.join(GroupModel, and_(*group_model_join_conditions))
process_instance_query = process_instance_query.join(
UserGroupAssignmentModel,
UserGroupAssignmentModel.group_id == GroupModel.id,
)
process_instance_query = process_instance_query.filter(
UserGroupAssignmentModel.user_id == user.id
)
process_instance_query = process_instance_query.filter(UserGroupAssignmentModel.user_id == user.id)

instance_metadata_aliases = {}
stock_columns = ProcessInstanceReportService.get_column_names_for_model(
ProcessInstanceModel
)
stock_columns = ProcessInstanceReportService.get_column_names_for_model(ProcessInstanceModel)
if report_filter.report_column_list:
process_instance_report.report_metadata["columns"] = (
report_filter.report_column_list
)
process_instance_report.report_metadata["columns"] = report_filter.report_column_list
if report_filter.report_filter_by_list:
process_instance_report.report_metadata["filter_by"] = (
report_filter.report_filter_by_list
)
process_instance_report.report_metadata["filter_by"] = report_filter.report_filter_by_list

for column in process_instance_report.report_metadata["columns"]:
if column["accessor"] in stock_columns:

@ -531,14 +483,10 @@ class ProcessInstanceReportService:
]
if filter_for_column:
isouter = False
conditions.append(
instance_metadata_alias.value == filter_for_column["field_value"]
)
conditions.append(instance_metadata_alias.value == filter_for_column["field_value"])
process_instance_query = process_instance_query.join(
instance_metadata_alias, and_(*conditions), isouter=isouter
).add_columns(
func.max(instance_metadata_alias.value).label(column["accessor"])
)
).add_columns(func.max(instance_metadata_alias.value).label(column["accessor"]))

order_by_query_array = []
order_by_array = process_instance_report.report_metadata["order_by"]

@ -548,22 +496,14 @@ class ProcessInstanceReportService:
attribute = re.sub("^-", "", order_by_option)
if attribute in stock_columns:
if order_by_option.startswith("-"):
order_by_query_array.append(
getattr(ProcessInstanceModel, attribute).desc()
)
order_by_query_array.append(getattr(ProcessInstanceModel, attribute).desc())
else:
order_by_query_array.append(
getattr(ProcessInstanceModel, attribute).asc()
)
order_by_query_array.append(getattr(ProcessInstanceModel, attribute).asc())
elif attribute in instance_metadata_aliases:
if order_by_option.startswith("-"):
order_by_query_array.append(
func.max(instance_metadata_aliases[attribute].value).desc()
)
order_by_query_array.append(func.max(instance_metadata_aliases[attribute].value).desc())
else:
order_by_query_array.append(
func.max(instance_metadata_aliases[attribute].value).asc()
)
order_by_query_array.append(func.max(instance_metadata_aliases[attribute].value).asc())
# return process_instance_query
process_instances = (
process_instance_query.group_by(ProcessInstanceModel.id)

@ -84,9 +84,7 @@ class ProcessInstanceService:
@staticmethod
def do_waiting(status_value: str = ProcessInstanceStatus.waiting.value) -> None:
"""Do_waiting."""
process_instance_ids_to_check = ProcessInstanceQueueService.peek_many(
status_value
)
process_instance_ids_to_check = ProcessInstanceQueueService.peek_many(status_value)
if len(process_instance_ids_to_check) == 0:
return

@ -100,18 +98,14 @@ class ProcessInstanceService:
locked = False
processor = None
try:
current_app.logger.info(
f"Processing process_instance {process_instance.id}"
)
current_app.logger.info(f"Processing process_instance {process_instance.id}")
processor = ProcessInstanceProcessor(process_instance)
processor.lock_process_instance(process_instance_lock_prefix)
locked = True
execution_strategy_name = current_app.config[
"SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_BACKGROUND"
]
processor.do_engine_steps(
save=True, execution_strategy_name=execution_strategy_name
)
processor.do_engine_steps(save=True, execution_strategy_name=execution_strategy_name)
except ProcessInstanceIsAlreadyLockedError:
continue
except Exception as e:

@ -120,8 +114,7 @@ class ProcessInstanceService:
db.session.add(process_instance)
db.session.commit()
error_message = (
"Error running waiting task for process_instance"
f" {process_instance.id}"
f"Error running waiting task for process_instance {process_instance.id}"
+ f"({process_instance.process_model_identifier}). {str(e)}"
)
current_app.logger.error(error_message)

@ -140,9 +133,7 @@ class ProcessInstanceService:
# navigation = processor.bpmn_process_instance.get_deep_nav_list()
# ProcessInstanceService.update_navigation(navigation, processor)
process_model_service = ProcessModelService()
process_model = process_model_service.get_process_model(
processor.process_model_identifier
)
process_model = process_model_service.get_process_model(processor.process_model_identifier)
process_model.display_name if process_model else ""
process_instance_api = ProcessInstanceApi(
id=processor.get_process_instance_id(),

@ -155,34 +146,24 @@ class ProcessInstanceService:
)

next_task_trying_again = next_task
if (
not next_task
): # The Next Task can be requested to be a certain task, useful for parallel tasks.
if not next_task: # The Next Task can be requested to be a certain task, useful for parallel tasks.
# This may or may not work, sometimes there is no next task to complete.
next_task_trying_again = processor.next_task()

if next_task_trying_again is not None:
process_instance_api.next_task = (
ProcessInstanceService.spiff_task_to_api_task(
processor, next_task_trying_again, add_docs_and_forms=True
)
process_instance_api.next_task = ProcessInstanceService.spiff_task_to_api_task(
processor, next_task_trying_again, add_docs_and_forms=True
)

return process_instance_api

def get_process_instance(self, process_instance_id: int) -> Any:
"""Get_process_instance."""
result = (
db.session.query(ProcessInstanceModel)
.filter(ProcessInstanceModel.id == process_instance_id)
.first()
)
result = db.session.query(ProcessInstanceModel).filter(ProcessInstanceModel.id == process_instance_id).first()
return result

@staticmethod
def get_users_assigned_to_task(
processor: ProcessInstanceProcessor, spiff_task: SpiffTask
) -> List[int]:
def get_users_assigned_to_task(processor: ProcessInstanceProcessor, spiff_task: SpiffTask) -> List[int]:
"""Get_users_assigned_to_task."""
if processor.process_instance_model.process_initiator_id is None:
raise ApiError.from_task(

@ -193,10 +174,7 @@ class ProcessInstanceService:

# Workflow associated with a study - get all the users
else:
if (
not hasattr(spiff_task.task_spec, "lane")
or spiff_task.task_spec.lane is None
):
if not hasattr(spiff_task.task_spec, "lane") or spiff_task.task_spec.lane is None:
return [processor.process_instance_model.process_initiator_id]

if spiff_task.task_spec.lane not in spiff_task.data:

@ -225,8 +203,7 @@ class ProcessInstanceService:
else:
raise ApiError.from_task(
error_code="task_lane_user_error",
message="Spiff Task %s lane user is not a string or dict"
% spiff_task.task_spec.name,
message="Spiff Task %s lane user is not a string or dict" % spiff_task.task_spec.name,
task=spiff_task,
)

@ -287,9 +264,7 @@ class ProcessInstanceService:
models = []

for identifier, value, list_index in cls.possible_file_data_values(data):
model = cls.file_data_model_for_value(
identifier, value, process_instance_id
)
model = cls.file_data_model_for_value(identifier, value, process_instance_id)
if model is not None:
model.list_index = list_index
models.append(model)

@ -303,7 +278,9 @@ class ProcessInstanceService:
models: List[ProcessInstanceFileDataModel],
) -> None:
for model in models:
digest_reference = f"data:{model.mimetype};name={model.filename};base64,{cls.FILE_DATA_DIGEST_PREFIX}{model.digest}"
digest_reference = (
f"data:{model.mimetype};name={model.filename};base64,{cls.FILE_DATA_DIGEST_PREFIX}{model.digest}"
)
if model.list_index is None:
data[model.identifier] = digest_reference
else:

@ -336,9 +313,7 @@ class ProcessInstanceService:
Abstracted here because we need to do it multiple times when completing all tasks in
a multi-instance task.
"""
AuthorizationService.assert_user_can_complete_spiff_task(
processor.process_instance_model.id, spiff_task, user
)
AuthorizationService.assert_user_can_complete_spiff_task(processor.process_instance_model.id, spiff_task, user)

ProcessInstanceService.save_file_data_and_replace_with_digest_references(
data,

@ -62,9 +62,7 @@ class ProcessModelService(FileSystemService):
process_group_path = os.path.abspath(
os.path.join(
FileSystemService.root_path(),
FileSystemService.id_string_to_relative_path(
process_group_identifier
),
FileSystemService.id_string_to_relative_path(process_group_identifier),
)
)
return cls.is_process_group(process_group_path)

@ -86,9 +84,7 @@ class ProcessModelService(FileSystemService):
process_model_path = os.path.abspath(
os.path.join(
FileSystemService.root_path(),
FileSystemService.id_string_to_relative_path(
process_model_identifier
),
FileSystemService.id_string_to_relative_path(process_model_identifier),
)
)
return cls.is_process_model(process_model_path)

@ -96,9 +92,7 @@ class ProcessModelService(FileSystemService):
return False

@staticmethod
def write_json_file(
file_path: str, json_data: dict, indent: int = 4, sort_keys: bool = True
) -> None:
def write_json_file(file_path: str, json_data: dict, indent: int = 4, sort_keys: bool = True) -> None:
"""Write json file."""
with open(file_path, "w") as h_open:
json.dump(json_data, h_open, indent=indent, sort_keys=sort_keys)

@ -120,9 +114,7 @@ class ProcessModelService(FileSystemService):
cls.save_process_model(process_model)

@classmethod
def update_process_model(
cls, process_model: ProcessModelInfo, attributes_to_update: dict
) -> None:
def update_process_model(cls, process_model: ProcessModelInfo, attributes_to_update: dict) -> None:
"""Update_spec."""
for atu_key, atu_value in attributes_to_update.items():
if hasattr(process_model, atu_key):

@ -133,14 +125,10 @@ class ProcessModelService(FileSystemService):
def save_process_model(cls, process_model: ProcessModelInfo) -> None:
"""Save_process_model."""
process_model_path = os.path.abspath(
os.path.join(
FileSystemService.root_path(), process_model.id_for_file_path()
)
os.path.join(FileSystemService.root_path(), process_model.id_for_file_path())
)
os.makedirs(process_model_path, exist_ok=True)
json_path = os.path.abspath(
os.path.join(process_model_path, cls.PROCESS_MODEL_JSON_FILE)
)
json_path = os.path.abspath(os.path.join(process_model_path, cls.PROCESS_MODEL_JSON_FILE))
process_model_id = process_model.id
# we don't save id in the json file
# this allows us to move models around on the filesystem

@ -157,32 +145,25 @@ class ProcessModelService(FileSystemService):
).all()
if len(instances) > 0:
raise ProcessModelWithInstancesNotDeletableError(
f"We cannot delete the model `{process_model_id}`, there are"
" existing instances that depend on it."
f"We cannot delete the model `{process_model_id}`, there are existing instances that depend on it."
)
process_model = self.get_process_model(process_model_id)
path = self.workflow_path(process_model)
shutil.rmtree(path)

def process_model_move(
self, original_process_model_id: str, new_location: str
) -> ProcessModelInfo:
def process_model_move(self, original_process_model_id: str, new_location: str) -> ProcessModelInfo:
"""Process_model_move."""
process_model = self.get_process_model(original_process_model_id)
original_model_path = self.workflow_path(process_model)
_, model_id = os.path.split(original_model_path)
new_relative_path = os.path.join(new_location, model_id)
new_model_path = os.path.abspath(
os.path.join(FileSystemService.root_path(), new_relative_path)
)
new_model_path = os.path.abspath(os.path.join(FileSystemService.root_path(), new_relative_path))
shutil.move(original_model_path, new_model_path)
new_process_model = self.get_process_model(new_relative_path)
return new_process_model

@classmethod
def get_process_model_from_relative_path(
cls, relative_path: str
) -> ProcessModelInfo:
def get_process_model_from_relative_path(cls, relative_path: str) -> ProcessModelInfo:
"""Get_process_model_from_relative_path."""
path = os.path.join(FileSystemService.root_path(), relative_path)
return cls.__scan_process_model(path)

@ -196,9 +177,7 @@ class ProcessModelService(FileSystemService):
if not os.path.exists(FileSystemService.root_path()):
raise ProcessEntityNotFoundError("process_model_root_not_found")

model_path = os.path.abspath(
os.path.join(FileSystemService.root_path(), process_model_id)
)
model_path = os.path.abspath(os.path.join(FileSystemService.root_path(), process_model_id))
if cls.is_process_model(model_path):
return cls.get_process_model_from_relative_path(process_model_id)
raise ProcessEntityNotFoundError("process_model_not_found")

@ -222,12 +201,8 @@ class ProcessModelService(FileSystemService):
process_model_glob = os.path.join(root_path, "**", "process_model.json")

for file in glob(process_model_glob, recursive=True):
process_model_relative_path = os.path.relpath(
file, start=FileSystemService.root_path()
)
process_model = cls.get_process_model_from_relative_path(
os.path.dirname(process_model_relative_path)
)
process_model_relative_path = os.path.relpath(file, start=FileSystemService.root_path())
process_model = cls.get_process_model_from_relative_path(os.path.dirname(process_model_relative_path))
process_models.append(process_model)
process_models.sort()

@ -235,11 +210,7 @@ class ProcessModelService(FileSystemService):
user = UserService.current_user()
new_process_model_list = []
for process_model in process_models:
modified_process_model_id = (
ProcessModelInfo.modify_process_identifier_for_path_param(
process_model.id
)
)
modified_process_model_id = ProcessModelInfo.modify_process_identifier_for_path_param(process_model.id)
uri = f"/v1.0/process-instances/{modified_process_model_id}"
has_permission = AuthorizationService.user_has_permission(
user=user, permission="create", target_uri=uri

@ -269,32 +240,24 @@ class ProcessModelService(FileSystemService):
if parent_group:
if full_group_id_path not in process_group_cache:
process_group_cache[full_group_id_path] = parent_group
parent_group_array.append(
{"id": parent_group.id, "display_name": parent_group.display_name}
)
parent_group_array.append({"id": parent_group.id, "display_name": parent_group.display_name})
return {"cache": process_group_cache, "process_groups": parent_group_array}

@classmethod
def get_parent_group_array(cls, process_identifier: str) -> list[ProcessGroupLite]:
"""Get_parent_group_array."""
parent_group_lites_with_cache = cls.get_parent_group_array_and_cache_it(
process_identifier, {}
)
parent_group_lites_with_cache = cls.get_parent_group_array_and_cache_it(process_identifier, {})
return parent_group_lites_with_cache["process_groups"]

@classmethod
def get_process_groups(
cls, process_group_id: Optional[str] = None
) -> list[ProcessGroup]:
def get_process_groups(cls, process_group_id: Optional[str] = None) -> list[ProcessGroup]:
"""Returns the process_groups."""
process_groups = cls.__scan_process_groups(process_group_id)
process_groups.sort()
return process_groups

@classmethod
def get_process_group(
cls, process_group_id: str, find_direct_nested_items: bool = True
) -> ProcessGroup:
def get_process_group(cls, process_group_id: str, find_direct_nested_items: bool = True) -> ProcessGroup:
"""Look for a given process_group, and return it."""
if os.path.exists(FileSystemService.root_path()):
process_group_path = os.path.abspath(

@ -309,9 +272,7 @@ class ProcessModelService(FileSystemService):
find_direct_nested_items=find_direct_nested_items,
)

raise ProcessEntityNotFoundError(
"process_group_not_found", f"Process Group Id: {process_group_id}"
)
raise ProcessEntityNotFoundError("process_group_not_found", f"Process Group Id: {process_group_id}")

@classmethod
def add_process_group(cls, process_group: ProcessGroup) -> ProcessGroup:

@ -331,16 +292,12 @@ class ProcessModelService(FileSystemService):
cls.write_json_file(json_path, serialized_process_group)
return process_group

def process_group_move(
self, original_process_group_id: str, new_location: str
) -> ProcessGroup:
def process_group_move(self, original_process_group_id: str, new_location: str) -> ProcessGroup:
"""Process_group_move."""
original_group_path = self.process_group_path(original_process_group_id)
_, original_group_id = os.path.split(original_group_path)
new_root = os.path.join(FileSystemService.root_path(), new_location)
new_group_path = os.path.abspath(
os.path.join(FileSystemService.root_path(), new_root, original_group_id)
)
new_group_path = os.path.abspath(os.path.join(FileSystemService.root_path(), new_root, original_group_id))
destination = shutil.move(original_group_path, new_group_path)
new_process_group = self.get_process_group(destination)
return new_process_group

@ -388,9 +345,7 @@ class ProcessModelService(FileSystemService):
return process_groups

@classmethod
def __scan_process_groups(
cls, process_group_id: Optional[str] = None
) -> list[ProcessGroup]:
def __scan_process_groups(cls, process_group_id: Optional[str] = None) -> list[ProcessGroup]:
"""__scan_process_groups."""
if not os.path.exists(FileSystemService.root_path()):
return [] # Nothing to scan yet. There are no files.

@ -409,9 +364,7 @@ class ProcessModelService(FileSystemService):
return process_groups

@classmethod
def find_or_create_process_group(
cls, dir_path: str, find_direct_nested_items: bool = True
) -> ProcessGroup:
def find_or_create_process_group(cls, dir_path: str, find_direct_nested_items: bool = True) -> ProcessGroup:
"""Reads the process_group.json file, and any nested directories."""
cat_path = os.path.join(dir_path, cls.PROCESS_GROUP_JSON_FILE)
if os.path.exists(cat_path):

@ -424,15 +377,10 @@ class ProcessModelService(FileSystemService):
if process_group is None:
raise ApiError(
error_code="process_group_could_not_be_loaded_from_disk",
message=(
"We could not load the process_group from disk from:"
f" {dir_path}"
),
message=f"We could not load the process_group from disk from: {dir_path}",
)
else:
process_group_id = cls.path_to_id(
dir_path.replace(FileSystemService.root_path(), "")
)
process_group_id = cls.path_to_id(dir_path.replace(FileSystemService.root_path(), ""))
process_group = ProcessGroup(
id="",
display_name=process_group_id,

@ -452,9 +400,7 @@ class ProcessModelService(FileSystemService):
# TODO: check whether this is a group or model
if cls.is_process_group(nested_item.path):
# This is a nested group
process_group.process_groups.append(
cls.find_or_create_process_group(nested_item.path)
)
process_group.process_groups.append(cls.find_or_create_process_group(nested_item.path))
elif ProcessModelService.is_process_model(nested_item.path):
process_group.process_models.append(
cls.__scan_process_model(

@ -490,19 +436,13 @@ class ProcessModelService(FileSystemService):
if process_model_info is None:
raise ApiError(
error_code="process_model_could_not_be_loaded_from_disk",
message=(
"We could not load the process_model from disk with data:"
f" {data}"
),
message=f"We could not load the process_model from disk with data: {data}",
)
else:
if name is None:
raise ApiError(
error_code="missing_name_of_process_model",
message=(
"Missing name of process model. Path not found:"
f" {json_file_path}"
),
message=f"Missing name of process model. Path not found: {json_file_path}",
)

process_model_info = ProcessModelInfo(

@ -511,9 +451,7 @@ class ProcessModelService(FileSystemService):
description="",
display_order=0,
)
cls.write_json_file(
json_file_path, cls.PROCESS_MODEL_SCHEMA.dump(process_model_info)
)
cls.write_json_file(json_file_path, cls.PROCESS_MODEL_SCHEMA.dump(process_model_info))
# we don't store `id` in the json files, so we add it in here
process_model_info.id = name
return process_model_info

@ -81,9 +81,7 @@ class ScriptUnitTestRunner:
context = cls._script_engine.environment.last_result()
result_as_boolean = context == expected_output_context

script_unit_test_result = ScriptUnitTestResult(
result=result_as_boolean, context=context
)
script_unit_test_result = ScriptUnitTestResult(result=result_as_boolean, context=context)
return script_unit_test_result

@classmethod

@ -95,9 +93,7 @@ class ScriptUnitTestRunner:
"""Run_test."""
# this is totally made up, but hopefully resembles what spiffworkflow ultimately does
unit_tests = task.task_spec.extensions["unitTests"]
unit_test = [
unit_test for unit_test in unit_tests if unit_test["id"] == test_identifier
][0]
unit_test = [unit_test for unit_test in unit_tests if unit_test["id"] == test_identifier][0]

input_context = None
expected_output_context = None

@ -114,13 +110,8 @@ class ScriptUnitTestRunner:
except json.decoder.JSONDecodeError as ex:
return ScriptUnitTestResult(
result=False,
error=(
"Failed to parse expectedOutputJson:"
f" {unit_test['expectedOutputJson']}: {str(ex)}"
),
error=f"Failed to parse expectedOutputJson: {unit_test['expectedOutputJson']}: {str(ex)}",
)

script = task.task_spec.script
return cls.run_with_script_and_pre_post_contexts(
script, input_context, expected_output_context
)
return cls.run_with_script_and_pre_post_contexts(script, input_context, expected_output_context)

@ -16,10 +16,7 @@ class SecretService:
@classmethod
def _encrypt(cls, value: str) -> str:
encrypted_bytes: bytes = b""
if (
current_app.config.get("SPIFFWORKFLOW_BACKEND_ENCRYPTION_LIB")
== "cryptography"
):
if current_app.config.get("SPIFFWORKFLOW_BACKEND_ENCRYPTION_LIB") == "cryptography":
# cryptography needs a bytes object
value_as_bytes = str.encode(value)
encrypted_bytes = current_app.config["CIPHER"].encrypt(value_as_bytes)

@ -98,9 +95,7 @@ class SecretService:
else:
raise ApiError(
error_code="update_secret_error",
message=(
f"Cannot update secret with key: {key}. Resource does not exist."
),
message=f"Cannot update secret with key: {key}. Resource does not exist.",
status_code=404,
)

@ -115,16 +110,11 @@ class SecretService:
except Exception as e:
raise ApiError(
error_code="delete_secret_error",
message=(
f"Could not delete secret with key: {key}. Original error"
f" is: {e}"
),
message=f"Could not delete secret with key: {key}. Original error is: {e}",
) from e
else:
raise ApiError(
error_code="delete_secret_error",
message=(
f"Cannot delete secret with key: {key}. Resource does not exist."
),
message=f"Cannot delete secret with key: {key}. Resource does not exist.",
status_code=404,
)

@ -49,37 +49,19 @@ class ServiceTaskDelegate:
|
|||
"""Given a code like 404, return a string like: The requested resource was not found."""
|
||||
msg = f"HTTP Status Code {code}."
|
||||
if code == 301:
|
||||
msg = (
|
||||
"301 (Permanent Redirect) - you may need to use a different URL in this"
|
||||
" service task."
|
||||
)
|
||||
msg = "301 (Permanent Redirect) - you may need to use a different URL in this service task."
|
||||
if code == 302:
|
||||
msg = (
|
||||
"302 (Temporary Redirect) - you may need to use a different URL in this"
|
||||
" service task."
|
||||
)
|
||||
msg = "302 (Temporary Redirect) - you may need to use a different URL in this service task."
|
||||
if code == 400:
|
||||
msg = (
|
||||
"400 (Bad Request) - The request was received by the service, but it"
|
||||
" was not understood."
|
||||
)
|
||||
msg = "400 (Bad Request) - The request was received by the service, but it was not understood."
|
||||
if code == 401:
|
||||
msg = (
|
||||
"401 (Unauthorized Error) - this end point requires some form of"
|
||||
" authentication."
|
||||
)
|
||||
msg = "401 (Unauthorized Error) - this end point requires some form of authentication."
|
||||
if code == 403:
|
||||
msg = (
|
||||
"403 (Forbidden) - The service you called refused to accept the"
|
||||
" request."
|
||||
)
|
||||
msg = "403 (Forbidden) - The service you called refused to accept the request."
|
||||
if code == 404:
|
||||
msg = "404 (Not Found) - The service did not find the requested resource."
|
||||
if code == 500:
|
||||
msg = (
|
||||
"500 (Internal Server Error) - The service you called is experiencing"
|
||||
" technical difficulties."
|
||||
)
|
||||
msg = "500 (Internal Server Error) - The service you called is experiencing technical difficulties."
|
||||
if code == 501:
|
||||
msg = (
|
||||
"501 (Not Implemented) - This service needs to be called with the"
|
||||
|
@ -94,10 +76,7 @@ class ServiceTaskDelegate:
|
|||
current_app.logger.info(f"Calling connector proxy using connector: {name}")
|
||||
with sentry_sdk.start_span(op="connector_by_name", description=name):
|
||||
with sentry_sdk.start_span(op="call-connector", description=call_url):
|
||||
params = {
|
||||
k: ServiceTaskDelegate.check_prefixes(v["value"])
|
||||
for k, v in bpmn_params.items()
|
||||
}
|
||||
params = {k: ServiceTaskDelegate.check_prefixes(v["value"]) for k, v in bpmn_params.items()}
|
||||
params["spiff__task_data"] = task_data
|
||||
|
||||
proxied_response = requests.post(call_url, json=params)
|
||||
|
@ -113,20 +92,12 @@ class ServiceTaskDelegate:
|
|||
parsed_response = {}
|
||||
|
||||
if proxied_response.status_code >= 300:
|
||||
message = ServiceTaskDelegate.get_message_for_status(
|
||||
proxied_response.status_code
|
||||
)
|
||||
error = (
|
||||
f"Received an unexpected response from service {name} :"
|
||||
f" {message}"
|
||||
)
|
||||
message = ServiceTaskDelegate.get_message_for_status(proxied_response.status_code)
|
||||
error = f"Received an unexpected response from service {name} : {message}"
|
||||
if "error" in parsed_response:
|
||||
error += parsed_response["error"]
|
||||
if json_parse_error:
|
||||
error += (
|
||||
"A critical component (The connector proxy) is not"
|
||||
" responding correctly."
|
||||
)
|
||||
error += "A critical component (The connector proxy) is not responding correctly."
|
||||
raise ConnectorProxyError(error)
|
||||
elif json_parse_error:
|
||||
raise ConnectorProxyError(
|
||||
|
|
|
@ -48,14 +48,10 @@ class SpecFileService(FileSystemService):
|
|||
extension_filter: str = "",
|
||||
) -> List[File]:
|
||||
"""Return all files associated with a workflow specification."""
|
||||
path = os.path.join(
|
||||
FileSystemService.root_path(), process_model_info.id_for_file_path()
|
||||
)
|
||||
path = os.path.join(FileSystemService.root_path(), process_model_info.id_for_file_path())
|
||||
files = SpecFileService._get_files(path, file_name)
|
||||
if extension_filter != "":
|
||||
files = list(
|
||||
filter(lambda file: file.name.endswith(extension_filter), files)
|
||||
)
|
||||
files = list(filter(lambda file: file.name.endswith(extension_filter), files))
|
||||
return files
|
||||
|
||||
@staticmethod
|
||||
|
@ -74,23 +70,17 @@ class SpecFileService(FileSystemService):
|
|||
files = SpecFileService.get_files(process_model_info)
|
||||
references = []
|
||||
for file in files:
|
||||
references.extend(
|
||||
SpecFileService.get_references_for_file(file, process_model_info)
|
||||
)
|
||||
references.extend(SpecFileService.get_references_for_file(file, process_model_info))
|
||||
return references
|
||||
|
||||
@classmethod
|
||||
def get_references_for_file(
|
||||
cls, file: File, process_model_info: ProcessModelInfo
|
||||
) -> list[SpecReference]:
|
||||
def get_references_for_file(cls, file: File, process_model_info: ProcessModelInfo) -> list[SpecReference]:
|
||||
"""Get_references_for_file."""
|
||||
full_file_path = SpecFileService.full_file_path(process_model_info, file.name)
|
||||
file_contents: bytes = b""
|
||||
with open(full_file_path) as f:
|
||||
file_contents = f.read().encode()
|
||||
return cls.get_references_for_file_contents(
|
||||
process_model_info, file.name, file_contents
|
||||
)
|
||||
return cls.get_references_for_file_contents(process_model_info, file.name, file_contents)
|
||||
|
||||
@classmethod
|
||||
def get_etree_from_xml_bytes(cls, binary_data: bytes) -> etree.Element:
|
||||
|
@ -139,9 +129,7 @@ class SpecFileService(FileSystemService):
|
|||
has_lanes = sub_parser.has_lanes()
|
||||
is_executable = sub_parser.process_executable
|
||||
start_messages = sub_parser.start_messages()
|
||||
is_primary = (
|
||||
sub_parser.get_id() == process_model_info.primary_process_id
|
||||
)
|
||||
is_primary = sub_parser.get_id() == process_model_info.primary_process_id
|
||||
|
||||
references.append(
|
||||
SpecReference(
|
||||
|
@ -162,9 +150,7 @@ class SpecFileService(FileSystemService):
|
|||
return references
|
||||
|
||||
@staticmethod
|
||||
def add_file(
|
||||
process_model_info: ProcessModelInfo, file_name: str, binary_data: bytes
|
||||
) -> File:
|
||||
def add_file(process_model_info: ProcessModelInfo, file_name: str, binary_data: bytes) -> File:
|
||||
"""Add_file."""
|
||||
# Same as update
|
||||
return SpecFileService.update_file(process_model_info, file_name, binary_data)
|
||||
|
@ -177,28 +163,20 @@ class SpecFileService(FileSystemService):
|
|||
BpmnValidator()
|
||||
parser = MyCustomParser()
|
||||
try:
|
||||
parser.add_bpmn_xml(
|
||||
cls.get_etree_from_xml_bytes(binary_data), filename=file_name
|
||||
)
|
||||
parser.add_bpmn_xml(cls.get_etree_from_xml_bytes(binary_data), filename=file_name)
|
||||
except Exception as exception:
|
||||
raise ProcessModelFileInvalidError(
|
||||
f"Received error trying to parse bpmn xml: {str(exception)}"
|
||||
) from exception
|
||||
|
||||
@classmethod
|
||||
def update_file(
|
||||
cls, process_model_info: ProcessModelInfo, file_name: str, binary_data: bytes
|
||||
) -> File:
|
||||
def update_file(cls, process_model_info: ProcessModelInfo, file_name: str, binary_data: bytes) -> File:
|
||||
"""Update_file."""
|
||||
SpecFileService.assert_valid_file_name(file_name)
|
||||
cls.validate_bpmn_xml(file_name, binary_data)
|
||||
|
||||
references = cls.get_references_for_file_contents(
|
||||
process_model_info, file_name, binary_data
|
||||
)
|
||||
primary_process_ref = next(
|
||||
(ref for ref in references if ref.is_primary and ref.is_executable), None
|
||||
)
|
||||
references = cls.get_references_for_file_contents(process_model_info, file_name, binary_data)
|
||||
primary_process_ref = next((ref for ref in references if ref.is_primary and ref.is_executable), None)
|
||||
|
||||
SpecFileService.clear_caches_for_file(file_name, process_model_info)
|
||||
for ref in references:
|
||||
|
@ -233,8 +211,7 @@ class SpecFileService(FileSystemService):
|
|||
full_file_path = SpecFileService.full_file_path(process_model_info, file_name)
|
||||
if not os.path.exists(full_file_path):
|
||||
raise ProcessModelFileNotFoundError(
|
||||
f"No file found with name {file_name} in"
|
||||
f" {process_model_info.display_name}"
|
||||
f"No file found with name {file_name} in {process_model_info.display_name}"
|
||||
)
|
||||
with open(full_file_path, "rb") as f_handle:
|
||||
spec_file_data = f_handle.read()
|
||||
|
@ -243,9 +220,7 @@ class SpecFileService(FileSystemService):
|
|||
@staticmethod
|
||||
def full_file_path(spec: ProcessModelInfo, file_name: str) -> str:
|
||||
"""File_path."""
|
||||
return os.path.abspath(
|
||||
os.path.join(SpecFileService.workflow_path(spec), file_name)
|
||||
)
|
||||
return os.path.abspath(os.path.join(SpecFileService.workflow_path(spec), file_name))
|
||||
|
||||
@staticmethod
|
||||
def last_modified(spec: ProcessModelInfo, file_name: str) -> datetime:
|
||||
|
@ -288,13 +263,11 @@ class SpecFileService(FileSystemService):
|
|||
SpecFileService.update_correlation_cache(ref)
|
||||
|
||||
@staticmethod
|
||||
def clear_caches_for_file(
|
||||
file_name: str, process_model_info: ProcessModelInfo
|
||||
) -> None:
|
||||
def clear_caches_for_file(file_name: str, process_model_info: ProcessModelInfo) -> None:
|
||||
"""Clear all caches related to a file."""
|
||||
db.session.query(SpecReferenceCache).filter(
|
||||
SpecReferenceCache.file_name == file_name
|
||||
).filter(SpecReferenceCache.process_model_id == process_model_info.id).delete()
|
||||
db.session.query(SpecReferenceCache).filter(SpecReferenceCache.file_name == file_name).filter(
|
||||
SpecReferenceCache.process_model_id == process_model_info.id
|
||||
).delete()
|
||||
# fixme: likely the other caches should be cleared as well, but we don't have a clean way to do so yet.
|
||||
|
||||
@staticmethod
|
||||
|
@ -307,9 +280,7 @@ class SpecFileService(FileSystemService):
|
|||
def update_process_cache(ref: SpecReference) -> None:
|
||||
"""Update_process_cache."""
|
||||
process_id_lookup = (
|
||||
SpecReferenceCache.query.filter_by(identifier=ref.identifier)
|
||||
.filter_by(type=ref.type)
|
||||
.first()
|
||||
SpecReferenceCache.query.filter_by(identifier=ref.identifier).filter_by(type=ref.type).first()
|
||||
)
|
||||
if process_id_lookup is None:
|
||||
process_id_lookup = SpecReferenceCache.from_spec_reference(ref)
|
||||
|
@ -317,9 +288,7 @@ class SpecFileService(FileSystemService):
|
|||
db.session.commit()
|
||||
else:
|
||||
if ref.relative_path != process_id_lookup.relative_path:
|
||||
full_bpmn_file_path = SpecFileService.full_path_from_relative_path(
|
||||
process_id_lookup.relative_path
|
||||
)
|
||||
full_bpmn_file_path = SpecFileService.full_path_from_relative_path(process_id_lookup.relative_path)
|
||||
# if the old relative bpmn file no longer exists, then assume things were moved around
|
||||
# on the file system. Otherwise, assume it is a duplicate process id and error.
|
||||
if os.path.isfile(full_bpmn_file_path):
|
||||
|
@ -351,11 +320,9 @@ class SpecFileService(FileSystemService):
|
|||
def update_message_trigger_cache(ref: SpecReference) -> None:
|
||||
"""Assure we know which messages can trigger the start of a process."""
|
||||
for message_name in ref.start_messages:
|
||||
message_triggerable_process_model = (
|
||||
MessageTriggerableProcessModel.query.filter_by(
|
||||
message_name=message_name,
|
||||
).first()
|
||||
)
|
||||
message_triggerable_process_model = MessageTriggerableProcessModel.query.filter_by(
|
||||
message_name=message_name,
|
||||
).first()
|
||||
if message_triggerable_process_model is None:
|
||||
message_triggerable_process_model = MessageTriggerableProcessModel(
|
||||
message_name=message_name,
|
||||
|
@ -364,22 +331,16 @@ class SpecFileService(FileSystemService):
|
|||
db.session.add(message_triggerable_process_model)
|
||||
db.session.commit()
|
||||
else:
|
||||
if (
|
||||
message_triggerable_process_model.process_model_identifier
|
||||
!= ref.process_model_id
|
||||
):
|
||||
if message_triggerable_process_model.process_model_identifier != ref.process_model_id:
|
||||
raise ProcessModelFileInvalidError(
|
||||
"Message model is already used to start process model"
|
||||
f" {ref.process_model_id}"
|
||||
f"Message model is already used to start process model {ref.process_model_id}"
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def update_correlation_cache(ref: SpecReference) -> None:
|
||||
"""Update_correlation_cache."""
|
||||
for name in ref.correlations.keys():
|
||||
correlation_property_retrieval_expressions = ref.correlations[name][
|
||||
"retrieval_expressions"
|
||||
]
|
||||
correlation_property_retrieval_expressions = ref.correlations[name]["retrieval_expressions"]
|
||||
|
||||
for cpre in correlation_property_retrieval_expressions:
|
||||
message_name = ref.messages.get(cpre["messageRef"], None)
|
||||
|
|
|
@ -37,14 +37,10 @@ class TaskService:
|
|||
on_duplicate_key_stmt = None
|
||||
if current_app.config["SPIFFWORKFLOW_BACKEND_DATABASE_TYPE"] == "mysql":
|
||||
insert_stmt = mysql_insert(JsonDataModel).values(list_of_dicts)
|
||||
on_duplicate_key_stmt = insert_stmt.on_duplicate_key_update(
|
||||
data=insert_stmt.inserted.data
|
||||
)
|
||||
on_duplicate_key_stmt = insert_stmt.on_duplicate_key_update(data=insert_stmt.inserted.data)
|
||||
else:
|
||||
insert_stmt = postgres_insert(JsonDataModel).values(list_of_dicts)
|
||||
on_duplicate_key_stmt = insert_stmt.on_conflict_do_nothing(
|
||||
index_elements=["hash"]
|
||||
)
|
||||
on_duplicate_key_stmt = insert_stmt.on_conflict_do_nothing(index_elements=["hash"])
|
||||
db.session.execute(on_duplicate_key_stmt)
|
||||
|
||||
@classmethod
|
||||
|
@ -61,17 +57,11 @@ class TaskService:
|
|||
"""
|
||||
new_properties_json = serializer.task_to_dict(spiff_task)
|
||||
spiff_task_data = new_properties_json.pop("data")
|
||||
python_env_data_dict = cls._get_python_env_data_dict_from_spiff_task(
|
||||
spiff_task, serializer
|
||||
)
|
||||
python_env_data_dict = cls._get_python_env_data_dict_from_spiff_task(spiff_task, serializer)
|
||||
task_model.properties_json = new_properties_json
|
||||
task_model.state = TaskStateNames[new_properties_json["state"]]
|
||||
json_data_dict = cls._update_task_data_on_task_model(
|
||||
task_model, spiff_task_data, "json_data_hash"
|
||||
)
|
||||
python_env_dict = cls._update_task_data_on_task_model(
|
||||
task_model, python_env_data_dict, "python_env_data_hash"
|
||||
)
|
||||
json_data_dict = cls._update_task_data_on_task_model(task_model, spiff_task_data, "json_data_hash")
|
||||
python_env_dict = cls._update_task_data_on_task_model(task_model, python_env_data_dict, "python_env_data_hash")
|
||||
return [json_data_dict, python_env_dict]
|
||||
|
||||
@classmethod
|
||||
|
@ -81,16 +71,9 @@ class TaskService:
|
|||
process_instance: ProcessInstanceModel,
|
||||
serializer: BpmnWorkflowSerializer,
|
||||
bpmn_definition_to_task_definitions_mappings: dict,
|
||||
) -> Tuple[
|
||||
Optional[BpmnProcessModel],
|
||||
TaskModel,
|
||||
dict[str, TaskModel],
|
||||
dict[str, JsonDataDict],
|
||||
]:
|
||||
) -> Tuple[Optional[BpmnProcessModel], TaskModel, dict[str, TaskModel], dict[str, JsonDataDict]]:
|
||||
spiff_task_guid = str(spiff_task.id)
|
||||
task_model: Optional[TaskModel] = TaskModel.query.filter_by(
|
||||
guid=spiff_task_guid
|
||||
).first()
|
||||
task_model: Optional[TaskModel] = TaskModel.query.filter_by(guid=spiff_task_guid).first()
|
||||
bpmn_process = None
|
||||
new_task_models: dict[str, TaskModel] = {}
|
||||
new_json_data_dicts: dict[str, JsonDataDict] = {}
|
||||
|
@ -103,9 +86,9 @@ class TaskService:
|
|||
)
|
||||
task_model = TaskModel.query.filter_by(guid=spiff_task_guid).first()
|
||||
if task_model is None:
|
||||
task_definition = bpmn_definition_to_task_definitions_mappings[
|
||||
spiff_task.workflow.spec.name
|
||||
][spiff_task.task_spec.name]
|
||||
task_definition = bpmn_definition_to_task_definitions_mappings[spiff_task.workflow.spec.name][
|
||||
spiff_task.task_spec.name
|
||||
]
|
||||
task_model = TaskModel(
|
||||
guid=spiff_task_guid,
|
||||
bpmn_process_id=bpmn_process.id,
|
||||
|
@ -115,9 +98,7 @@ class TaskService:
|
|||
return (bpmn_process, task_model, new_task_models, new_json_data_dicts)
|
||||
|
||||
@classmethod
|
||||
def task_subprocess(
|
||||
cls, spiff_task: SpiffTask
|
||||
) -> Tuple[Optional[str], Optional[BpmnWorkflow]]:
|
||||
def task_subprocess(cls, spiff_task: SpiffTask) -> Tuple[Optional[str], Optional[BpmnWorkflow]]:
|
||||
top_level_workflow = spiff_task.workflow._get_outermost_workflow()
|
||||
my_wf = spiff_task.workflow # This is the workflow the spiff_task is part of
|
||||
my_sp = None
|
||||
|
@ -149,31 +130,25 @@ class TaskService:
|
|||
# check for bpmn_process_id because mypy doesn't realize bpmn_process can be None
|
||||
if process_instance.bpmn_process_id is None:
|
||||
spiff_workflow = spiff_task.workflow._get_outermost_workflow()
|
||||
bpmn_process, new_task_models, new_json_data_dicts = (
|
||||
cls.add_bpmn_process(
|
||||
bpmn_process_dict=serializer.workflow_to_dict(spiff_workflow),
|
||||
process_instance=process_instance,
|
||||
bpmn_definition_to_task_definitions_mappings=bpmn_definition_to_task_definitions_mappings,
|
||||
spiff_workflow=spiff_workflow,
|
||||
serializer=serializer,
|
||||
)
|
||||
bpmn_process, new_task_models, new_json_data_dicts = cls.add_bpmn_process(
|
||||
bpmn_process_dict=serializer.workflow_to_dict(spiff_workflow),
|
||||
process_instance=process_instance,
|
||||
bpmn_definition_to_task_definitions_mappings=bpmn_definition_to_task_definitions_mappings,
|
||||
spiff_workflow=spiff_workflow,
|
||||
serializer=serializer,
|
||||
)
|
||||
else:
|
||||
bpmn_process = BpmnProcessModel.query.filter_by(
|
||||
guid=subprocess_guid
|
||||
).first()
|
||||
bpmn_process = BpmnProcessModel.query.filter_by(guid=subprocess_guid).first()
|
||||
if bpmn_process is None:
|
||||
spiff_workflow = spiff_task.workflow
|
||||
bpmn_process, new_task_models, new_json_data_dicts = (
|
||||
cls.add_bpmn_process(
|
||||
bpmn_process_dict=serializer.workflow_to_dict(subprocess),
|
||||
process_instance=process_instance,
|
||||
bpmn_process_parent=process_instance.bpmn_process,
|
||||
bpmn_process_guid=subprocess_guid,
|
||||
bpmn_definition_to_task_definitions_mappings=bpmn_definition_to_task_definitions_mappings,
|
||||
spiff_workflow=spiff_workflow,
|
||||
serializer=serializer,
|
||||
)
|
||||
bpmn_process, new_task_models, new_json_data_dicts = cls.add_bpmn_process(
|
||||
bpmn_process_dict=serializer.workflow_to_dict(subprocess),
|
||||
process_instance=process_instance,
|
||||
bpmn_process_parent=process_instance.bpmn_process,
|
||||
bpmn_process_guid=subprocess_guid,
|
||||
bpmn_definition_to_task_definitions_mappings=bpmn_definition_to_task_definitions_mappings,
|
||||
spiff_workflow=spiff_workflow,
|
||||
serializer=serializer,
|
||||
)
|
||||
return (bpmn_process, new_task_models, new_json_data_dicts)
|
||||
|
||||
|
@ -221,9 +196,7 @@ class TaskService:
|
|||
bpmn_process.properties_json = bpmn_process_dict
|
||||
|
||||
bpmn_process_data_json = json.dumps(bpmn_process_data_dict, sort_keys=True)
|
||||
bpmn_process_data_hash = sha256(
|
||||
bpmn_process_data_json.encode("utf8")
|
||||
).hexdigest()
|
||||
bpmn_process_data_hash = sha256(bpmn_process_data_json.encode("utf8")).hexdigest()
|
||||
if bpmn_process.json_data_hash != bpmn_process_data_hash:
|
||||
new_json_data_dicts[bpmn_process_data_hash] = {
|
||||
"hash": bpmn_process_data_hash,
|
||||
|
@ -272,9 +245,7 @@ class TaskService:
|
|||
if json_data_dict is not None:
|
||||
new_json_data_dicts[json_data_dict["hash"]] = json_data_dict
|
||||
|
||||
python_env_data_dict = cls._get_python_env_data_dict_from_spiff_task(
|
||||
spiff_task, serializer
|
||||
)
|
||||
python_env_data_dict = cls._get_python_env_data_dict_from_spiff_task(spiff_task, serializer)
|
||||
python_env_dict = TaskService._update_task_data_on_task_model(
|
||||
task_model, python_env_data_dict, "python_env_data_hash"
|
||||
)
|
||||
|
@ -303,9 +274,9 @@ class TaskService:
|
|||
spiff_task: SpiffTask,
|
||||
bpmn_definition_to_task_definitions_mappings: dict,
|
||||
) -> TaskModel:
|
||||
task_definition = bpmn_definition_to_task_definitions_mappings[
|
||||
spiff_task.workflow.spec.name
|
||||
][spiff_task.task_spec.name]
|
||||
task_definition = bpmn_definition_to_task_definitions_mappings[spiff_task.workflow.spec.name][
|
||||
spiff_task.task_spec.name
|
||||
]
|
||||
task_model = TaskModel(
|
||||
guid=str(spiff_task.id),
|
||||
bpmn_process_id=bpmn_process.id,
|
||||
|
@ -318,9 +289,7 @@ class TaskService:
|
|||
def _get_python_env_data_dict_from_spiff_task(
|
||||
cls, spiff_task: SpiffTask, serializer: BpmnWorkflowSerializer
|
||||
) -> dict:
|
||||
user_defined_state = (
|
||||
spiff_task.workflow.script_engine.environment.user_defined_state()
|
||||
)
|
||||
user_defined_state = spiff_task.workflow.script_engine.environment.user_defined_state()
|
||||
# this helps to convert items like datetime objects to be json serializable
|
||||
converted_data: dict = serializer.data_converter.convert(user_defined_state)
|
||||
return converted_data
|
||||
|
|
|
@ -35,9 +35,7 @@ class UserService:
|
|||
) -> UserModel:
|
||||
"""Create_user."""
|
||||
user_model: Optional[UserModel] = (
|
||||
UserModel.query.filter(UserModel.service == service)
|
||||
.filter(UserModel.service_id == service_id)
|
||||
.first()
|
||||
UserModel.query.filter(UserModel.service == service).filter(UserModel.service_id == service_id).first()
|
||||
)
|
||||
if user_model is None:
|
||||
if username == "":
|
||||
|
@ -89,19 +87,13 @@ class UserService:
|
|||
def current_user() -> Any:
|
||||
"""Current_user."""
|
||||
if not UserService.has_user():
|
||||
raise ApiError(
|
||||
"logged_out", "You are no longer logged in.", status_code=401
|
||||
)
|
||||
raise ApiError("logged_out", "You are no longer logged in.", status_code=401)
|
||||
return g.user
|
||||
|
||||
@staticmethod
|
||||
def get_principal_by_user_id(user_id: int) -> PrincipalModel:
|
||||
"""Get_principal_by_user_id."""
|
||||
principal = (
|
||||
db.session.query(PrincipalModel)
|
||||
.filter(PrincipalModel.user_id == user_id)
|
||||
.first()
|
||||
)
|
||||
principal = db.session.query(PrincipalModel).filter(PrincipalModel.user_id == user_id).first()
|
||||
if isinstance(principal, PrincipalModel):
|
||||
return principal
|
||||
raise ApiError(
|
||||
|
@ -110,14 +102,10 @@ class UserService:
|
|||
)
|
||||
|
||||
@classmethod
|
||||
def create_principal(
|
||||
cls, child_id: int, id_column_name: str = "user_id"
|
||||
) -> PrincipalModel:
|
||||
def create_principal(cls, child_id: int, id_column_name: str = "user_id") -> PrincipalModel:
|
||||
"""Create_principal."""
|
||||
column = PrincipalModel.__table__.columns[id_column_name]
|
||||
principal: Optional[PrincipalModel] = PrincipalModel.query.filter(
|
||||
column == child_id
|
||||
).first()
|
||||
principal: Optional[PrincipalModel] = PrincipalModel.query.filter(column == child_id).first()
|
||||
if principal is None:
|
||||
principal = PrincipalModel()
|
||||
setattr(principal, id_column_name, child_id)
|
||||
|
@ -136,12 +124,7 @@ class UserService:
|
|||
@classmethod
|
||||
def add_user_to_group(cls, user: UserModel, group: GroupModel) -> None:
|
||||
"""Add_user_to_group."""
|
||||
exists = (
|
||||
UserGroupAssignmentModel()
|
||||
.query.filter_by(user_id=user.id)
|
||||
.filter_by(group_id=group.id)
|
||||
.count()
|
||||
)
|
||||
exists = UserGroupAssignmentModel().query.filter_by(user_id=user.id).filter_by(group_id=group.id).count()
|
||||
if not exists:
|
||||
ugam = UserGroupAssignmentModel(user_id=user.id, group_id=group.id)
|
||||
db.session.add(ugam)
|
||||
|
@ -151,15 +134,10 @@ class UserService:
|
|||
def add_waiting_group_assignment(cls, username: str, group: GroupModel) -> None:
|
||||
"""Add_waiting_group_assignment."""
|
||||
wugam = (
|
||||
UserGroupAssignmentWaitingModel()
|
||||
.query.filter_by(username=username)
|
||||
.filter_by(group_id=group.id)
|
||||
.first()
|
||||
UserGroupAssignmentWaitingModel().query.filter_by(username=username).filter_by(group_id=group.id).first()
|
||||
)
|
||||
if not wugam:
|
||||
wugam = UserGroupAssignmentWaitingModel(
|
||||
username=username, group_id=group.id
|
||||
)
|
||||
wugam = UserGroupAssignmentWaitingModel(username=username, group_id=group.id)
|
||||
db.session.add(wugam)
|
||||
db.session.commit()
|
||||
if wugam.is_match_all():
|
||||
|
@ -179,10 +157,7 @@ class UserService:
|
|||
db.session.delete(assignment)
|
||||
wildcard = (
|
||||
UserGroupAssignmentWaitingModel()
|
||||
.query.filter(
|
||||
UserGroupAssignmentWaitingModel.username
|
||||
== UserGroupAssignmentWaitingModel.MATCH_ALL_USERS
|
||||
)
|
||||
.query.filter(UserGroupAssignmentWaitingModel.username == UserGroupAssignmentWaitingModel.MATCH_ALL_USERS)
|
||||
.all()
|
||||
)
|
||||
for assignment in wildcard:
|
||||
|
@ -190,14 +165,10 @@ class UserService:
|
|||
db.session.commit()
|
||||
|
||||
@staticmethod
|
||||
def get_user_by_service_and_service_id(
|
||||
service: str, service_id: str
|
||||
) -> Optional[UserModel]:
|
||||
def get_user_by_service_and_service_id(service: str, service_id: str) -> Optional[UserModel]:
|
||||
"""Get_user_by_service_and_service_id."""
|
||||
user: UserModel = (
|
||||
UserModel.query.filter(UserModel.service == service)
|
||||
.filter(UserModel.service_id == service_id)
|
||||
.first()
|
||||
UserModel.query.filter(UserModel.service == service).filter(UserModel.service_id == service_id).first()
|
||||
)
|
||||
if user:
|
||||
return user
|
||||
|
@ -211,8 +182,6 @@ class UserService:
|
|||
HumanTaskModel.lane_assignment_id.in_(group_ids) # type: ignore
|
||||
).all()
|
||||
for human_task in human_tasks:
|
||||
human_task_user = HumanTaskUserModel(
|
||||
user_id=user.id, human_task_id=human_task.id
|
||||
)
|
||||
human_task_user = HumanTaskUserModel(user_id=user.id, human_task_id=human_task.id)
|
||||
db.session.add(human_task_user)
|
||||
db.session.commit()
|
||||
|
|
|
@ -62,9 +62,7 @@ class TaskModelSavingDelegate(EngineStepDelegate):
|
|||
) -> None:
|
||||
self.secondary_engine_step_delegate = secondary_engine_step_delegate
|
||||
self.process_instance = process_instance
|
||||
self.bpmn_definition_to_task_definitions_mappings = (
|
||||
bpmn_definition_to_task_definitions_mappings
|
||||
)
|
||||
self.bpmn_definition_to_task_definitions_mappings = bpmn_definition_to_task_definitions_mappings
|
||||
|
||||
self.current_task_model: Optional[TaskModel] = None
|
||||
self.task_models: dict[str, TaskModel] = {}
|
||||
|
@ -78,9 +76,7 @@ class TaskModelSavingDelegate(EngineStepDelegate):
|
|||
"""
|
||||
return self.process_instance.bpmn_process_id is not None
|
||||
|
||||
def _update_json_data_dicts_using_list(
|
||||
self, json_data_dict_list: list[Optional[JsonDataDict]]
|
||||
) -> None:
|
||||
def _update_json_data_dicts_using_list(self, json_data_dict_list: list[Optional[JsonDataDict]]) -> None:
|
||||
for json_data_dict in json_data_dict_list:
|
||||
if json_data_dict is not None:
|
||||
self.json_data_dicts[json_data_dict["hash"]] = json_data_dict
|
||||
|
@ -105,9 +101,7 @@ class TaskModelSavingDelegate(EngineStepDelegate):
|
|||
def did_complete_task(self, spiff_task: SpiffTask) -> None:
|
||||
if self.current_task_model and self.should_update_task_model():
|
||||
self.current_task_model.end_in_seconds = time.time()
|
||||
json_data_dict_list = TaskService.update_task_model(
|
||||
self.current_task_model, spiff_task, self.serializer
|
||||
)
|
||||
json_data_dict_list = TaskService.update_task_model(self.current_task_model, spiff_task, self.serializer)
|
||||
self._update_json_data_dicts_using_list(json_data_dict_list)
|
||||
self.task_models[self.current_task_model.guid] = self.current_task_model
|
||||
if self.secondary_engine_step_delegate:
|
||||
|
@ -126,11 +120,7 @@ class TaskModelSavingDelegate(EngineStepDelegate):
|
|||
if self.should_update_task_model():
|
||||
# excludes FUTURE and COMPLETED. the others were required to get PP1 to go to completion.
|
||||
for waiting_spiff_task in bpmn_process_instance.get_tasks(
|
||||
TaskState.WAITING
|
||||
| TaskState.CANCELLED
|
||||
| TaskState.READY
|
||||
| TaskState.MAYBE
|
||||
| TaskState.LIKELY
|
||||
TaskState.WAITING | TaskState.CANCELLED | TaskState.READY | TaskState.MAYBE | TaskState.LIKELY
|
||||
):
|
||||
bpmn_process, task_model, new_task_models, new_json_data_dicts = (
|
||||
TaskService.find_or_create_task_model_from_spiff_task(
|
||||
|
@ -142,9 +132,7 @@ class TaskModelSavingDelegate(EngineStepDelegate):
|
|||
)
|
||||
self.task_models.update(new_task_models)
|
||||
self.json_data_dicts.update(new_json_data_dicts)
|
||||
json_data_dict_list = TaskService.update_task_model(
|
||||
task_model, waiting_spiff_task, self.serializer
|
||||
)
|
||||
json_data_dict_list = TaskService.update_task_model(task_model, waiting_spiff_task, self.serializer)
|
||||
self.task_models[task_model.guid] = task_model
|
||||
self._update_json_data_dicts_using_list(json_data_dict_list)
|
||||
|
||||
|
@ -180,9 +168,8 @@ class StepDetailLoggingDelegate(EngineStepDelegate):
|
|||
}
|
||||
|
||||
def should_log(self, spiff_task: SpiffTask) -> bool:
|
||||
return (
|
||||
spiff_task.task_spec.spec_type in self.tasks_to_log
|
||||
and not spiff_task.task_spec.name.endswith(".EndJoin")
|
||||
return spiff_task.task_spec.spec_type in self.tasks_to_log and not spiff_task.task_spec.name.endswith(
|
||||
".EndJoin"
|
||||
)
|
||||
|
||||
def will_complete_task(self, spiff_task: SpiffTask) -> None:
|
||||
|
@ -193,9 +180,7 @@ class StepDetailLoggingDelegate(EngineStepDelegate):
|
|||
def did_complete_task(self, spiff_task: SpiffTask) -> None:
|
||||
if self.should_log(spiff_task):
|
||||
self.step_details.append(
|
||||
self.spiff_step_details_mapping(
|
||||
spiff_task, self.current_task_start_in_seconds, time.time()
|
||||
)
|
||||
self.spiff_step_details_mapping(spiff_task, self.current_task_start_in_seconds, time.time())
|
||||
)
|
||||
|
||||
def save(self, commit: bool = True) -> None:
|
||||
|
@ -211,9 +196,7 @@ class ExecutionStrategy:
|
|||
"""__init__."""
|
||||
self.delegate = delegate
|
||||
|
||||
def do_engine_steps(
|
||||
self, bpmn_process_instance: BpmnWorkflow, exit_at: None = None
|
||||
) -> None:
|
||||
def do_engine_steps(self, bpmn_process_instance: BpmnWorkflow, exit_at: None = None) -> None:
|
||||
pass
|
||||
|
||||
def save(self) -> None:
|
||||
|
@ -223,9 +206,7 @@ class ExecutionStrategy:
|
|||
class GreedyExecutionStrategy(ExecutionStrategy):
|
||||
"""The common execution strategy. This will greedily run all engine steps without stopping."""
|
||||
|
||||
def do_engine_steps(
|
||||
self, bpmn_process_instance: BpmnWorkflow, exit_at: None = None
|
||||
) -> None:
|
||||
def do_engine_steps(self, bpmn_process_instance: BpmnWorkflow, exit_at: None = None) -> None:
|
||||
bpmn_process_instance.do_engine_steps(
|
||||
exit_at=exit_at,
|
||||
will_complete_task=self.delegate.will_complete_task,
|
||||
|
@ -241,9 +222,7 @@ class RunUntilServiceTaskExecutionStrategy(ExecutionStrategy):
|
|||
return (to an interstitial page). The background processor would then take over.
|
||||
"""
|
||||
|
||||
def do_engine_steps(
|
||||
self, bpmn_process_instance: BpmnWorkflow, exit_at: None = None
|
||||
) -> None:
|
||||
def do_engine_steps(self, bpmn_process_instance: BpmnWorkflow, exit_at: None = None) -> None:
|
||||
engine_steps = list(
|
||||
[
|
||||
t
|
||||
|
@ -270,9 +249,7 @@ class RunUntilServiceTaskExecutionStrategy(ExecutionStrategy):
|
|||
self.delegate.after_engine_steps(bpmn_process_instance)
|
||||
|
||||
|
||||
def execution_strategy_named(
|
||||
name: str, delegate: EngineStepDelegate
|
||||
) -> ExecutionStrategy:
|
||||
def execution_strategy_named(name: str, delegate: EngineStepDelegate) -> ExecutionStrategy:
|
||||
cls = {
|
||||
"greedy": GreedyExecutionStrategy,
|
||||
"run_until_service_task": RunUntilServiceTaskExecutionStrategy,
|
||||
|
@ -305,9 +282,7 @@ class WorkflowExecutionService:
|
|||
|
||||
def do_engine_steps(self, exit_at: None = None, save: bool = False) -> None:
|
||||
"""Do_engine_steps."""
|
||||
with safe_assertion(
|
||||
ProcessInstanceLockService.has_lock(self.process_instance_model.id)
|
||||
) as tripped:
|
||||
with safe_assertion(ProcessInstanceLockService.has_lock(self.process_instance_model.id)) as tripped:
|
||||
if tripped:
|
||||
raise AssertionError(
|
||||
"The current thread has not obtained a lock for this process"
|
||||
|
@ -364,9 +339,7 @@ class WorkflowExecutionService:
|
|||
def queue_waiting_receive_messages(self) -> None:
|
||||
"""Queue_waiting_receive_messages."""
|
||||
waiting_events = self.bpmn_process_instance.waiting_events()
|
||||
waiting_message_events = filter(
|
||||
lambda e: e["event_type"] == "Message", waiting_events
|
||||
)
|
||||
waiting_message_events = filter(lambda e: e["event_type"] == "Message", waiting_events)
|
||||
|
||||
for event in waiting_message_events:
|
||||
# Ensure we are only creating one message instance for each waiting message
|
||||
|
|
|
@ -54,9 +54,7 @@ class BaseTest:
|
|||
)
|
||||
|
||||
@staticmethod
|
||||
def logged_in_headers(
|
||||
user: UserModel, _redirect_url: str = "http://some/frontend/url"
|
||||
) -> Dict[str, str]:
|
||||
def logged_in_headers(user: UserModel, _redirect_url: str = "http://some/frontend/url") -> Dict[str, str]:
|
||||
"""Logged_in_headers."""
|
||||
return dict(Authorization="Bearer " + user.encode_auth_token())
|
||||
|
||||
|
@ -80,9 +78,7 @@ class BaseTest:
|
|||
if bpmn_file_location is None:
|
||||
bpmn_file_location = process_model_id
|
||||
|
||||
self.create_process_group(
|
||||
client, user, process_group_description, process_group_display_name
|
||||
)
|
||||
self.create_process_group(client, user, process_group_description, process_group_display_name)
|
||||
|
||||
self.create_process_model_with_api(
|
||||
client,
|
||||
|
@ -108,9 +104,7 @@ class BaseTest:
|
|||
display_name: str = "",
|
||||
) -> str:
|
||||
"""Create_process_group."""
|
||||
process_group = ProcessGroup(
|
||||
id=process_group_id, display_name=display_name, display_order=0, admin=False
|
||||
)
|
||||
process_group = ProcessGroup(id=process_group_id, display_name=display_name, display_order=0, admin=False)
|
||||
response = client.post(
|
||||
"/v1.0/process-groups",
|
||||
headers=self.logged_in_headers(user),
|
||||
|
@ -139,9 +133,7 @@ class BaseTest:
|
|||
# make sure we have a group
|
||||
process_group_id, _ = os.path.split(process_model_id)
|
||||
modified_process_group_id = process_group_id.replace("/", ":")
|
||||
process_group_path = os.path.abspath(
|
||||
os.path.join(FileSystemService.root_path(), process_group_id)
|
||||
)
|
||||
process_group_path = os.path.abspath(os.path.join(FileSystemService.root_path(), process_group_id))
|
||||
if ProcessModelService.is_process_group(process_group_path):
|
||||
if exception_notification_addresses is None:
|
||||
exception_notification_addresses = []
|
||||
|
@ -171,14 +163,9 @@ class BaseTest:
|
|||
else:
|
||||
raise Exception("You must create the group first")
|
||||
else:
|
||||
raise Exception(
|
||||
"You must include the process_model_id, which must be a path to the"
|
||||
" model"
|
||||
)
|
||||
raise Exception("You must include the process_model_id, which must be a path to the model")
|
||||
|
||||
def get_test_data_file_full_path(
|
||||
self, file_name: str, process_model_test_data_dir: str
|
||||
) -> str:
|
||||
def get_test_data_file_full_path(self, file_name: str, process_model_test_data_dir: str) -> str:
|
||||
"""Get_test_data_file_contents."""
|
||||
return os.path.join(
|
||||
current_app.instance_path,
|
||||
|
@ -190,13 +177,9 @@ class BaseTest:
|
|||
file_name,
|
||||
)
|
||||
|
||||
def get_test_data_file_contents(
|
||||
self, file_name: str, process_model_test_data_dir: str
|
||||
) -> bytes:
|
||||
def get_test_data_file_contents(self, file_name: str, process_model_test_data_dir: str) -> bytes:
|
||||
"""Get_test_data_file_contents."""
|
||||
file_full_path = self.get_test_data_file_full_path(
|
||||
file_name, process_model_test_data_dir
|
||||
)
|
||||
file_full_path = self.get_test_data_file_full_path(file_name, process_model_test_data_dir)
|
||||
with open(file_full_path, "rb") as file:
|
||||
return file.read()
|
||||
|
||||
|
@ -325,9 +308,7 @@ class BaseTest:
|
|||
) -> UserModel:
|
||||
"""Create_user_with_permission."""
|
||||
user = BaseTest.find_or_create_user(username=username)
|
||||
return cls.add_permissions_to_user(
|
||||
user, target_uri=target_uri, permission_names=permission_names
|
||||
)
|
||||
return cls.add_permissions_to_user(user, target_uri=target_uri, permission_names=permission_names)
|
||||
|
||||
@classmethod
|
||||
def add_permissions_to_user(
|
||||
|
@ -337,9 +318,7 @@ class BaseTest:
|
|||
permission_names: Optional[list[str]] = None,
|
||||
) -> UserModel:
|
||||
"""Add_permissions_to_user."""
|
||||
permission_target = AuthorizationService.find_or_create_permission_target(
|
||||
target_uri
|
||||
)
|
||||
permission_target = AuthorizationService.find_or_create_permission_target(target_uri)
|
||||
|
||||
if permission_names is None:
|
||||
permission_names = [member.name for member in Permission]
|
||||
|
@ -371,8 +350,6 @@ class BaseTest:
|
|||
"""Modify_process_identifier_for_path_param."""
|
||||
return ProcessModelInfo.modify_process_identifier_for_path_param(identifier)
|
||||
|
||||
def un_modify_modified_process_identifier_for_path_param(
|
||||
self, modified_identifier: str
|
||||
) -> str:
|
||||
def un_modify_modified_process_identifier_for_path_param(self, modified_identifier: str) -> str:
|
||||
"""Un_modify_modified_process_model_id."""
|
||||
return modified_identifier.replace(":", "/")
|
||||
|
|
|
@ -77,13 +77,9 @@ class ExampleDataLoader:
|
|||
try:
|
||||
file = open(file_path, "rb")
|
||||
data = file.read()
|
||||
file_info = SpecFileService.add_file(
|
||||
process_model_info=spec, file_name=filename, binary_data=data
|
||||
)
|
||||
file_info = SpecFileService.add_file(process_model_info=spec, file_name=filename, binary_data=data)
|
||||
if is_primary:
|
||||
references = SpecFileService.get_references_for_file(
|
||||
file_info, spec
|
||||
)
|
||||
references = SpecFileService.get_references_for_file(file_info, spec)
|
||||
spec.primary_process_id = references[0].identifier
|
||||
spec.primary_file_name = filename
|
||||
ProcessModelService.save_process_model(spec)
|
||||
|
|
|
@ -22,9 +22,7 @@ class TestForGoodErrors(BaseTest):
|
|||
) -> Any:
|
||||
"""Returns the next available user task for a given process instance, if possible."""
|
||||
human_tasks = (
|
||||
db.session.query(HumanTaskModel)
|
||||
.filter(HumanTaskModel.process_instance_id == process_instance_id)
|
||||
.all()
|
||||
db.session.query(HumanTaskModel).filter(HumanTaskModel.process_instance_id == process_instance_id).all()
|
||||
)
|
||||
assert len(human_tasks) > 0, "No human tasks found for process."
|
||||
human_task = human_tasks[0]
|
||||
|
@ -59,9 +57,7 @@ class TestForGoodErrors(BaseTest):
|
|||
headers=self.logged_in_headers(with_super_admin_user),
|
||||
)
|
||||
assert response.status_code == 200
|
||||
response = self.get_next_user_task(
|
||||
process_instance_id, client, with_super_admin_user
|
||||
)
|
||||
response = self.get_next_user_task(process_instance_id, client, with_super_admin_user)
|
||||
assert response.json is not None
|
||||
assert response.json["error_type"] == "TemplateSyntaxError"
|
||||
assert response.json["line_number"] == 3
|
||||
|
@ -88,9 +84,7 @@ class TestForGoodErrors(BaseTest):
|
|||
f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model.id)}/{process_instance.id}/run",
|
||||
headers=self.logged_in_headers(with_super_admin_user),
|
||||
)
|
||||
response = self.get_next_user_task(
|
||||
process_instance.id, client, with_super_admin_user
|
||||
)
|
||||
response = self.get_next_user_task(process_instance.id, client, with_super_admin_user)
|
||||
|
||||
assert response.status_code == 400
|
||||
assert response.json is not None
|
||||
|
|
|
@ -19,9 +19,7 @@ class TestLoggingService(BaseTest):
|
|||
"""Test_process_instance_run."""
|
||||
process_group_id = "test_logging_spiff_logger"
|
||||
process_model_id = "simple_script"
|
||||
self.create_process_group(
|
||||
client=client, user=with_super_admin_user, process_group_id=process_group_id
|
||||
)
|
||||
self.create_process_group(client=client, user=with_super_admin_user, process_group_id=process_group_id)
|
||||
process_model_identifier = f"{process_group_id}/{process_model_id}"
|
||||
# create the model
|
||||
self.create_process_model_with_api(
|
||||
|
@ -33,9 +31,7 @@ class TestLoggingService(BaseTest):
|
|||
)
|
||||
|
||||
bpmn_file_name = "simple_script.bpmn"
|
||||
bpmn_file_data_bytes = self.get_test_data_file_contents(
|
||||
bpmn_file_name, "simple_script"
|
||||
)
|
||||
bpmn_file_data_bytes = self.get_test_data_file_contents(bpmn_file_name, "simple_script")
|
||||
# add bpmn to the model
|
||||
self.create_spec_file(
|
||||
client=client,
|
||||
|
|
|
@ -49,9 +49,7 @@ class TestNestedGroups(BaseTest):
|
|||
f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/run",
|
||||
headers=self.logged_in_headers(with_super_admin_user),
|
||||
)
|
||||
process_instance = ProcessInstanceService().get_process_instance(
|
||||
process_instance_id
|
||||
)
|
||||
process_instance = ProcessInstanceService().get_process_instance(process_instance_id)
|
||||
assert process_instance
|
||||
modified_process_group_id = process_group_id.replace("/", ":")
|
||||
response = client.delete(
|
||||
|
@ -61,10 +59,7 @@ class TestNestedGroups(BaseTest):
|
|||
assert response.status_code == 400
|
||||
assert response.json["error_code"] == "existing_instances"
|
||||
assert "We cannot delete the group" in response.json["message"]
|
||||
assert (
|
||||
"there are models with existing instances inside the group"
|
||||
in response.json["message"]
|
||||
)
|
||||
assert "there are models with existing instances inside the group" in response.json["message"]
|
||||
|
||||
def test_delete_group_with_running_instance_in_nested_group(
|
||||
self,
|
||||
|
@ -110,9 +105,7 @@ class TestNestedGroups(BaseTest):
|
|||
f"/v1.0/process-instances/{process_instance_id}/run",
|
||||
headers=self.logged_in_headers(with_super_admin_user),
|
||||
)
|
||||
process_instance = ProcessInstanceService().get_process_instance(
|
||||
process_instance_id
|
||||
)
|
||||
process_instance = ProcessInstanceService().get_process_instance(process_instance_id)
|
||||
assert process_instance
|
||||
modified_process_group_id = process_group_id.replace("/", ":")
|
||||
response = client.delete(
|
||||
|
@ -122,10 +115,7 @@ class TestNestedGroups(BaseTest):
|
|||
assert response.status_code == 400
|
||||
assert response.json["error_code"] == "existing_instances"
|
||||
assert "We cannot delete the group" in response.json["message"]
|
||||
assert (
|
||||
"there are models with existing instances inside the group"
|
||||
in response.json["message"]
|
||||
)
|
||||
assert "there are models with existing instances inside the group" in response.json["message"]
|
||||
|
||||
def test_nested_groups(
|
||||
self,
|
||||
|
@ -137,12 +127,8 @@ class TestNestedGroups(BaseTest):
|
|||
# /process-groups/{process_group_path}/show
|
||||
target_uri = "/v1.0/process-groups/group_a,group_b"
|
||||
user = self.find_or_create_user()
|
||||
self.add_permissions_to_user(
|
||||
user, target_uri=target_uri, permission_names=["read"]
|
||||
)
|
||||
response = client.get( # noqa: F841
|
||||
target_uri, headers=self.logged_in_headers(user)
|
||||
)
|
||||
self.add_permissions_to_user(user, target_uri=target_uri, permission_names=["read"])
|
||||
response = client.get(target_uri, headers=self.logged_in_headers(user)) # noqa: F841
|
||||
|
||||
def test_add_nested_group(
|
||||
self,
|
||||
|
@ -268,11 +254,7 @@ class TestNestedGroups(BaseTest):
|
|||
|
||||
target_uri = "/v1.0/process-groups/group_a"
|
||||
user = self.find_or_create_user()
|
||||
self.add_permissions_to_user(
|
||||
user, target_uri=target_uri, permission_names=["read"]
|
||||
)
|
||||
response = client.get( # noqa: F841
|
||||
target_uri, headers=self.logged_in_headers(user)
|
||||
)
|
||||
self.add_permissions_to_user(user, target_uri=target_uri, permission_names=["read"])
|
||||
response = client.get(target_uri, headers=self.logged_in_headers(user)) # noqa: F841
|
||||
|
||||
print("test_process_group_show: ")
|
||||
|
|
|
@ -24,9 +24,7 @@ class TestFlaskOpenId(BaseTest):
|
|||
response = client.get("/openid/.well-known/openid-configuration")
|
||||
discovered_urls = response.json
|
||||
assert "http://localhost/openid" == discovered_urls["issuer"]
|
||||
assert (
|
||||
"http://localhost/openid/auth" == discovered_urls["authorization_endpoint"]
|
||||
)
|
||||
assert "http://localhost/openid/auth" == discovered_urls["authorization_endpoint"]
|
||||
assert "http://localhost/openid/token" == discovered_urls["token_endpoint"]
|
||||
|
||||
def test_get_login_page(
|
||||
|
@ -70,8 +68,6 @@ class TestFlaskOpenId(BaseTest):
|
|||
assert "id_token" in response.json
|
||||
assert "refresh_token" in response.json
|
||||
|
||||
decoded_token = jwt.decode(
|
||||
response.json["id_token"], options={"verify_signature": False}
|
||||
)
|
||||
decoded_token = jwt.decode(response.json["id_token"], options={"verify_signature": False})
|
||||
assert "iss" in decoded_token
|
||||
assert "email" in decoded_token
|
||||
|
|
|
@ -61,9 +61,7 @@ class TestProcessApi(BaseTest):
|
|||
)
|
||||
assert response.status_code == 403
|
||||
|
||||
self.add_permissions_to_user(
|
||||
user, target_uri="/v1.0/process-groups", permission_names=["read"]
|
||||
)
|
||||
self.add_permissions_to_user(user, target_uri="/v1.0/process-groups", permission_names=["read"])
|
||||
response = client.get(
|
||||
"/v1.0/process-groups",
|
||||
headers=self.logged_in_headers(user),
|
||||
|
@ -84,9 +82,7 @@ class TestProcessApi(BaseTest):
|
|||
) -> None:
|
||||
"""Test_permissions_check."""
|
||||
user = self.find_or_create_user()
|
||||
self.add_permissions_to_user(
|
||||
user, target_uri="/v1.0/process-groups", permission_names=["read"]
|
||||
)
|
||||
self.add_permissions_to_user(user, target_uri="/v1.0/process-groups", permission_names=["read"])
|
||||
request_body = {
|
||||
"requests_to_check": {
|
||||
"/v1.0/process-groups": ["GET", "POST"],
|
||||
|
@ -120,9 +116,7 @@ class TestProcessApi(BaseTest):
|
|||
process_group_id = "test_process_group"
|
||||
process_group_display_name = "Test Process Group"
|
||||
# creates the group directory, and the json file
|
||||
self.create_process_group(
|
||||
client, with_super_admin_user, process_group_id, process_group_display_name
|
||||
)
|
||||
self.create_process_group(client, with_super_admin_user, process_group_id, process_group_display_name)
|
||||
|
||||
process_model_id = "sample"
|
||||
model_display_name = "Sample"
|
||||
|
@ -146,9 +140,7 @@ class TestProcessApi(BaseTest):
|
|||
|
||||
# add bpmn file to the model
|
||||
bpmn_file_name = "sample.bpmn"
|
||||
bpmn_file_data_bytes = self.get_test_data_file_contents(
|
||||
bpmn_file_name, "sample"
|
||||
)
|
||||
bpmn_file_data_bytes = self.get_test_data_file_contents(bpmn_file_name, "sample")
|
||||
self.create_spec_file(
|
||||
client,
|
||||
process_model_id=process_model.id,
|
||||
|
@ -175,14 +167,10 @@ class TestProcessApi(BaseTest):
|
|||
process_group_description = "Test Process Group"
|
||||
process_model_id = "sample"
|
||||
process_model_identifier = f"{process_group_id}/{process_model_id}"
|
||||
self.create_process_group(
|
||||
client, with_super_admin_user, process_group_id, process_group_description
|
||||
)
|
||||
self.create_process_group(client, with_super_admin_user, process_group_id, process_group_description)
|
||||
|
||||
text = "Create a Bug Tracker process model "
|
||||
text += (
|
||||
"with a Bug Details form that collects summary, description, and priority"
|
||||
)
|
||||
text += "with a Bug Details form that collects summary, description, and priority"
|
||||
body = {"natural_language_text": text}
|
||||
self.create_process_model_with_api(
|
||||
client,
|
||||
|
@ -215,18 +203,12 @@ class TestProcessApi(BaseTest):
|
|||
assert os.path.exists(process_model_diagram)
|
||||
form_schema_json = os.path.join(process_model_path, "bug-details-schema.json")
|
||||
assert os.path.exists(form_schema_json)
|
||||
form_uischema_json = os.path.join(
|
||||
process_model_path, "bug-details-uischema.json"
|
||||
)
|
||||
form_uischema_json = os.path.join(process_model_path, "bug-details-uischema.json")
|
||||
assert os.path.exists(form_uischema_json)
|
||||
|
||||
process_instance_report = ProcessInstanceReportModel.query.filter_by(
|
||||
identifier="bug-tracker"
|
||||
).first()
|
||||
process_instance_report = ProcessInstanceReportModel.query.filter_by(identifier="bug-tracker").first()
|
||||
assert process_instance_report is not None
|
||||
report_column_accessors = [
|
||||
i["accessor"] for i in process_instance_report.report_metadata["columns"]
|
||||
]
|
||||
report_column_accessors = [i["accessor"] for i in process_instance_report.report_metadata["columns"]]
|
||||
expected_column_accessors = [
|
||||
"id",
|
||||
"process_model_display_name",
|
||||
|
@ -253,9 +235,7 @@ class TestProcessApi(BaseTest):
|
|||
process_model_identifier = f"{process_group_id}/{process_model_id}"
|
||||
initial_primary_process_id = "sample"
|
||||
terminal_primary_process_id = "new_process_id"
|
||||
self.create_process_group(
|
||||
client=client, user=with_super_admin_user, process_group_id=process_group_id
|
||||
)
|
||||
self.create_process_group(client=client, user=with_super_admin_user, process_group_id=process_group_id)
|
||||
|
||||
bpmn_file_name = f"{process_model_id}.bpmn"
|
||||
bpmn_file_source_directory = process_model_id
|
||||
|
@ -266,15 +246,11 @@ class TestProcessApi(BaseTest):
|
|||
)
|
||||
assert process_model.primary_process_id == initial_primary_process_id
|
||||
|
||||
bpmn_file_data_bytes = self.get_test_data_file_contents(
|
||||
bpmn_file_name, bpmn_file_source_directory
|
||||
)
|
||||
bpmn_file_data_bytes = self.get_test_data_file_contents(bpmn_file_name, bpmn_file_source_directory)
|
||||
bpmn_file_data_string = bpmn_file_data_bytes.decode("utf-8")
|
||||
old_string = f'bpmn:process id="{initial_primary_process_id}"'
|
||||
new_string = f'bpmn:process id="{terminal_primary_process_id}"'
|
||||
updated_bpmn_file_data_string = bpmn_file_data_string.replace(
|
||||
old_string, new_string
|
||||
)
|
||||
updated_bpmn_file_data_string = bpmn_file_data_string.replace(old_string, new_string)
|
||||
updated_bpmn_file_data_bytes = bytearray(updated_bpmn_file_data_string, "utf-8")
|
||||
data = {"file": (io.BytesIO(updated_bpmn_file_data_bytes), bpmn_file_name)}
|
||||
|
||||
|
@ -303,9 +279,7 @@ class TestProcessApi(BaseTest):
|
|||
process_group_description = "Test Process Group"
|
||||
process_model_id = "sample"
|
||||
process_model_identifier = f"{process_group_id}/{process_model_id}"
|
||||
self.create_process_group(
|
||||
client, with_super_admin_user, process_group_id, process_group_description
|
||||
)
|
||||
self.create_process_group(client, with_super_admin_user, process_group_id, process_group_description)
|
||||
self.create_process_model_with_api(
|
||||
client,
|
||||
process_model_id=process_model_identifier,
|
||||
|
@ -342,12 +316,8 @@ class TestProcessApi(BaseTest):
|
|||
process_model_identifier = f"{test_process_group_id}/{test_process_model_id}"
|
||||
modified_process_model_identifier = process_model_identifier.replace("/", ":")
|
||||
self.create_process_group(client, with_super_admin_user, test_process_group_id)
|
||||
self.create_process_model_with_api(
|
||||
client, process_model_identifier, user=with_super_admin_user
|
||||
)
|
||||
bpmn_file_data_bytes = self.get_test_data_file_contents(
|
||||
bpmn_file_name, bpmn_file_location
|
||||
)
|
||||
self.create_process_model_with_api(client, process_model_identifier, user=with_super_admin_user)
|
||||
bpmn_file_data_bytes = self.get_test_data_file_contents(bpmn_file_name, bpmn_file_location)
|
||||
self.create_spec_file(
|
||||
client=client,
|
||||
process_model_id=process_model_identifier,
|
||||
|
@ -390,9 +360,7 @@ class TestProcessApi(BaseTest):
|
|||
with_super_admin_user: UserModel,
|
||||
) -> None:
|
||||
"""Test_process_model_update."""
|
||||
self.create_process_group(
|
||||
client, with_super_admin_user, "test_process_group", "Test Process Group"
|
||||
)
|
||||
self.create_process_group(client, with_super_admin_user, "test_process_group", "Test Process Group")
|
||||
process_model_identifier = "test_process_group/make_cookies"
|
||||
self.create_process_model_with_api(
|
||||
client,
|
||||
|
@ -408,9 +376,7 @@ class TestProcessApi(BaseTest):
|
|||
process_model.display_name = "Updated Display Name"
|
||||
process_model.primary_file_name = "superduper.bpmn"
|
||||
process_model.primary_process_id = "superduper"
|
||||
process_model.metadata_extraction_paths = [
|
||||
{"key": "extraction1", "path": "path1"}
|
||||
]
|
||||
process_model.metadata_extraction_paths = [{"key": "extraction1", "path": "path1"}]
|
||||
|
||||
modified_process_model_identifier = process_model_identifier.replace("/", ":")
|
||||
response = client.put(
|
||||
|
@ -424,9 +390,7 @@ class TestProcessApi(BaseTest):
|
|||
assert response.json["display_name"] == "Updated Display Name"
|
||||
assert response.json["primary_file_name"] == "superduper.bpmn"
|
||||
assert response.json["primary_process_id"] == "superduper"
|
||||
assert response.json["metadata_extraction_paths"] == [
|
||||
{"key": "extraction1", "path": "path1"}
|
||||
]
|
||||
assert response.json["metadata_extraction_paths"] == [{"key": "extraction1", "path": "path1"}]
|
||||
|
||||
def test_process_model_list_all(
|
||||
self,
|
||||
|
@ -582,9 +546,7 @@ class TestProcessApi(BaseTest):
|
|||
assert response.json is not None
|
||||
# We should get 5 back, as one of the items in the cache is a decision.
|
||||
assert len(response.json) == 5
|
||||
simple_form = next(
|
||||
p for p in response.json if p["identifier"] == "Process_WithForm"
|
||||
)
|
||||
simple_form = next(p for p in response.json if p["identifier"] == "Process_WithForm")
|
||||
assert simple_form["display_name"] == "Process With Form"
|
||||
assert simple_form["process_model_id"] == "test_group_one/simple_form"
|
||||
assert simple_form["has_lanes"] is False
|
||||
|
@ -668,9 +630,7 @@ class TestProcessApi(BaseTest):
|
|||
group_id = "test_process_group"
|
||||
group_display_name = "Test Group"
|
||||
|
||||
self.create_process_group(
|
||||
client, with_super_admin_user, group_id, display_name=group_display_name
|
||||
)
|
||||
self.create_process_group(client, with_super_admin_user, group_id, display_name=group_display_name)
|
||||
process_group = ProcessModelService.get_process_group(group_id)
|
||||
|
||||
assert process_group.display_name == group_display_name
|
||||
|
@ -700,9 +660,7 @@ class TestProcessApi(BaseTest):
|
|||
for i in range(5):
|
||||
group_id = f"test_process_group_{i}"
|
||||
group_display_name = f"Test Group {i}"
|
||||
self.create_process_group(
|
||||
client, with_super_admin_user, group_id, display_name=group_display_name
|
||||
)
|
||||
self.create_process_group(client, with_super_admin_user, group_id, display_name=group_display_name)
|
||||
|
||||
# get all groups
|
||||
response = client.get(
|
||||
|
@ -775,9 +733,7 @@ class TestProcessApi(BaseTest):
|
|||
with_super_admin_user: UserModel,
|
||||
) -> None:
|
||||
"""Test_process_model_file_update."""
|
||||
process_model_identifier = self.create_group_and_model_with_bpmn(
|
||||
client, with_super_admin_user
|
||||
)
|
||||
process_model_identifier = self.create_group_and_model_with_bpmn(client, with_super_admin_user)
|
||||
modified_process_model_id = process_model_identifier.replace("/", ":")
|
||||
|
||||
data = {"key1": "THIS DATA"}
|
||||
|
@ -801,9 +757,7 @@ class TestProcessApi(BaseTest):
|
|||
with_super_admin_user: UserModel,
|
||||
) -> None:
|
||||
"""Test_process_model_file_update."""
|
||||
process_model_identifier = self.create_group_and_model_with_bpmn(
|
||||
client, with_super_admin_user
|
||||
)
|
||||
process_model_identifier = self.create_group_and_model_with_bpmn(client, with_super_admin_user)
|
||||
modified_process_model_id = process_model_identifier.replace("/", ":")
|
||||
|
||||
data = {"file": (io.BytesIO(b""), "random_fact.svg")}
|
||||
|
@ -831,9 +785,7 @@ class TestProcessApi(BaseTest):
|
|||
process_group_description = "Test Group"
|
||||
process_model_id = "random_fact"
|
||||
process_model_identifier = f"{process_group_id}/{process_model_id}"
|
||||
self.create_process_group(
|
||||
client, with_super_admin_user, process_group_id, process_group_description
|
||||
)
|
||||
self.create_process_group(client, with_super_admin_user, process_group_id, process_group_description)
|
||||
self.create_process_model_with_api(
|
||||
client,
|
||||
process_model_id=process_model_identifier,
|
||||
|
@ -879,16 +831,12 @@ class TestProcessApi(BaseTest):
|
|||
with_super_admin_user: UserModel,
|
||||
) -> None:
|
||||
"""Test_process_model_file_update."""
|
||||
process_model_identifier = self.create_group_and_model_with_bpmn(
|
||||
client, with_super_admin_user
|
||||
)
|
||||
process_model_identifier = self.create_group_and_model_with_bpmn(client, with_super_admin_user)
|
||||
# self.create_spec_file(client, user=with_super_admin_user)
|
||||
|
||||
# process_model = load_test_spec("random_fact")
|
||||
bad_process_model_identifier = f"x{process_model_identifier}"
|
||||
modified_bad_process_model_identifier = bad_process_model_identifier.replace(
|
||||
"/", ":"
|
||||
)
|
||||
modified_bad_process_model_identifier = bad_process_model_identifier.replace("/", ":")
|
||||
response = client.delete(
|
||||
f"/v1.0/process-models/{modified_bad_process_model_identifier}/files/random_fact.svg",
|
||||
follow_redirects=True,
|
||||
|
@ -907,9 +855,7 @@ class TestProcessApi(BaseTest):
|
|||
with_super_admin_user: UserModel,
|
||||
) -> None:
|
||||
"""Test_process_model_file_update."""
|
||||
process_model_identifier = self.create_group_and_model_with_bpmn(
|
||||
client, with_super_admin_user
|
||||
)
|
||||
process_model_identifier = self.create_group_and_model_with_bpmn(client, with_super_admin_user)
|
||||
modified_process_model_identifier = process_model_identifier.replace("/", ":")
|
||||
|
||||
response = client.delete(
|
||||
|
@ -929,12 +875,8 @@ class TestProcessApi(BaseTest):
|
|||
with_db_and_bpmn_file_cleanup: None,
|
||||
with_super_admin_user: UserModel,
|
||||
) -> None:
|
||||
process_model_identifier = self.create_group_and_model_with_bpmn(
|
||||
client, with_super_admin_user
|
||||
)
|
||||
process_model = ProcessModelService.get_process_model(
|
||||
process_model_id=process_model_identifier
|
||||
)
|
||||
process_model_identifier = self.create_group_and_model_with_bpmn(client, with_super_admin_user)
|
||||
process_model = ProcessModelService.get_process_model(process_model_id=process_model_identifier)
|
||||
modified_process_model_identifier = process_model_identifier.replace("/", ":")
|
||||
|
||||
response = client.delete(
|
||||
|
@ -955,9 +897,7 @@ class TestProcessApi(BaseTest):
|
|||
with_super_admin_user: UserModel,
|
||||
) -> None:
|
||||
"""Test_process_model_file_update."""
|
||||
process_model_identifier = self.create_group_and_model_with_bpmn(
|
||||
client, with_super_admin_user
|
||||
)
|
||||
process_model_identifier = self.create_group_and_model_with_bpmn(client, with_super_admin_user)
|
||||
modified_process_model_identifier = process_model_identifier.replace("/", ":")
|
||||
|
||||
self.create_spec_file(
|
||||
|
@ -992,9 +932,7 @@ class TestProcessApi(BaseTest):
|
|||
with_super_admin_user: UserModel,
|
||||
) -> None:
|
||||
"""Test_get_file."""
|
||||
process_model_identifier = self.create_group_and_model_with_bpmn(
|
||||
client, with_super_admin_user
|
||||
)
|
||||
process_model_identifier = self.create_group_and_model_with_bpmn(client, with_super_admin_user)
|
||||
modified_process_model_identifier = process_model_identifier.replace("/", ":")
|
||||
|
||||
response = client.get(
|
||||
|
@ -1014,9 +952,7 @@ class TestProcessApi(BaseTest):
|
|||
with_super_admin_user: UserModel,
|
||||
) -> None:
|
||||
"""Test_get_workflow_from_workflow_spec."""
|
||||
process_model_identifier = self.create_group_and_model_with_bpmn(
|
||||
client, with_super_admin_user
|
||||
)
|
||||
process_model_identifier = self.create_group_and_model_with_bpmn(client, with_super_admin_user)
|
||||
modified_process_model_identifier = process_model_identifier.replace("/", ":")
|
||||
|
||||
response = client.post(
|
||||
|
@ -1071,9 +1007,7 @@ class TestProcessApi(BaseTest):
|
|||
with_super_admin_user: UserModel,
|
||||
) -> None:
|
||||
"""Test_get_process_group_when_found."""
|
||||
process_model_identifier = self.create_group_and_model_with_bpmn(
|
||||
client, with_super_admin_user
|
||||
)
|
||||
process_model_identifier = self.create_group_and_model_with_bpmn(client, with_super_admin_user)
|
||||
process_group_id, process_model_id = os.path.split(process_model_identifier)
|
||||
|
||||
response = client.get(
|
||||
|
@ -1119,9 +1053,7 @@ class TestProcessApi(BaseTest):
|
|||
assert response.status_code == 200
|
||||
assert response.json is not None
|
||||
assert response.json["id"] == "test_group_one/test_group_two"
|
||||
assert response.json["parent_groups"] == [
|
||||
{"display_name": "test_group_one", "id": "test_group_one"}
|
||||
]
|
||||
assert response.json["parent_groups"] == [{"display_name": "test_group_one", "id": "test_group_one"}]
|
||||
|
||||
def test_get_process_model_when_found(
|
||||
self,
|
||||
|
@ -1146,9 +1078,7 @@ class TestProcessApi(BaseTest):
|
|||
assert response.json["id"] == process_model_identifier
|
||||
assert len(response.json["files"]) == 1
|
||||
assert response.json["files"][0]["name"] == "random_fact.bpmn"
|
||||
assert response.json["parent_groups"] == [
|
||||
{"display_name": "test_group", "id": "test_group"}
|
||||
]
|
||||
assert response.json["parent_groups"] == [{"display_name": "test_group", "id": "test_group"}]
|
||||
|
||||
def test_get_process_model_when_not_found(
|
||||
self,
|
||||
|
@ -1180,9 +1110,7 @@ class TestProcessApi(BaseTest):
|
|||
"""Test_process_instance_create."""
|
||||
test_process_model_id = "runs_without_input/sample"
|
||||
headers = self.logged_in_headers(with_super_admin_user)
|
||||
response = self.create_process_instance_from_process_model_id_with_api(
|
||||
client, test_process_model_id, headers
|
||||
)
|
||||
response = self.create_process_instance_from_process_model_id_with_api(client, test_process_model_id, headers)
|
||||
assert response.json is not None
|
||||
assert response.json["updated_at_in_seconds"] is not None
|
||||
assert response.json["status"] == "not_started"
|
||||
|
@ -1244,9 +1172,7 @@ class TestProcessApi(BaseTest):
|
|||
process_group_id=process_group_id,
|
||||
process_model_id=process_model_id,
|
||||
)
|
||||
modified_process_model_identifier = (
|
||||
self.modify_process_identifier_for_path_param(process_model_identifier)
|
||||
)
|
||||
modified_process_model_identifier = self.modify_process_identifier_for_path_param(process_model_identifier)
|
||||
headers = self.logged_in_headers(with_super_admin_user)
|
||||
create_response = self.create_process_instance_from_process_model_id_with_api(
|
||||
client, process_model_identifier, headers
|
||||
|
@ -1264,9 +1190,7 @@ class TestProcessApi(BaseTest):
|
|||
assert show_response.json is not None
|
||||
assert show_response.status_code == 200
|
||||
file_system_root = FileSystemService.root_path()
|
||||
file_path = (
|
||||
f"{file_system_root}/{process_model_identifier}/{process_model_id}.bpmn"
|
||||
)
|
||||
file_path = f"{file_system_root}/{process_model_identifier}/{process_model_id}.bpmn"
|
||||
with open(file_path) as f_open:
|
||||
xml_file_contents = f_open.read()
|
||||
assert show_response.json["bpmn_xml_file_contents"] == xml_file_contents
|
||||
|
@ -1287,13 +1211,9 @@ class TestProcessApi(BaseTest):
|
|||
process_model_id=process_model_id,
|
||||
bpmn_file_location="call_activity_nested",
|
||||
)
|
||||
spec_reference = SpecReferenceCache.query.filter_by(
|
||||
identifier="Level2b"
|
||||
).first()
|
||||
spec_reference = SpecReferenceCache.query.filter_by(identifier="Level2b").first()
|
||||
assert spec_reference
|
||||
modified_process_model_identifier = (
|
||||
self.modify_process_identifier_for_path_param(process_model_identifier)
|
||||
)
|
||||
modified_process_model_identifier = self.modify_process_identifier_for_path_param(process_model_identifier)
|
||||
headers = self.logged_in_headers(with_super_admin_user)
|
||||
create_response = self.create_process_instance_from_process_model_id_with_api(
|
||||
client, process_model_identifier, headers
|
||||
|
@ -1313,15 +1233,11 @@ class TestProcessApi(BaseTest):
|
|||
assert show_response.json is not None
|
||||
assert show_response.status_code == 200
|
||||
file_system_root = FileSystemService.root_path()
|
||||
process_instance_file_path = (
|
||||
f"{file_system_root}/{process_model_identifier}/{process_model_id}.bpmn"
|
||||
)
|
||||
process_instance_file_path = f"{file_system_root}/{process_model_identifier}/{process_model_id}.bpmn"
|
||||
with open(process_instance_file_path) as f_open:
|
||||
xml_file_contents = f_open.read()
|
||||
assert show_response.json["bpmn_xml_file_contents"] != xml_file_contents
|
||||
spec_reference_file_path = os.path.join(
|
||||
file_system_root, spec_reference.relative_path
|
||||
)
|
||||
spec_reference_file_path = os.path.join(file_system_root, spec_reference.relative_path)
|
||||
with open(spec_reference_file_path) as f_open:
|
||||
xml_file_contents = f_open.read()
|
||||
assert show_response.json["bpmn_xml_file_contents"] == xml_file_contents
|
||||
|
@ -1366,9 +1282,7 @@ class TestProcessApi(BaseTest):
|
|||
assert json_data
|
||||
assert json_data["status"] == "complete"
|
||||
process_instance_id = json_data["id"]
|
||||
process_instance = ProcessInstanceModel.query.filter_by(
|
||||
id=process_instance_id
|
||||
).first()
|
||||
process_instance = ProcessInstanceModel.query.filter_by(id=process_instance_id).first()
|
||||
assert process_instance
|
||||
|
||||
processor = ProcessInstanceProcessor(process_instance)
|
||||
|
@ -1418,9 +1332,7 @@ class TestProcessApi(BaseTest):
|
|||
)
|
||||
assert response.json is not None
|
||||
|
||||
process_instance = ProcessInstanceModel.query.filter_by(
|
||||
id=process_instance_id
|
||||
).first()
|
||||
process_instance = ProcessInstanceModel.query.filter_by(id=process_instance_id).first()
|
||||
processor = ProcessInstanceProcessor(process_instance)
|
||||
processor.do_engine_steps(save=True)
|
||||
task = processor.get_all_user_tasks()[0]
|
||||
|
@ -1439,18 +1351,14 @@ class TestProcessApi(BaseTest):
|
|||
f"/v1.0/messages/{message_model_identifier}",
|
||||
content_type="application/json",
|
||||
headers=self.logged_in_headers(with_super_admin_user),
|
||||
data=json.dumps(
|
||||
{"payload": payload, "process_instance_id": process_instance_id}
|
||||
),
|
||||
data=json.dumps({"payload": payload, "process_instance_id": process_instance_id}),
|
||||
)
|
||||
assert response.status_code == 200
|
||||
json_data = response.json
|
||||
assert json_data
|
||||
assert json_data["status"] == "complete"
|
||||
process_instance_id = json_data["id"]
|
||||
process_instance = ProcessInstanceModel.query.filter_by(
|
||||
id=process_instance_id
|
||||
).first()
|
||||
process_instance = ProcessInstanceModel.query.filter_by(id=process_instance_id).first()
|
||||
assert process_instance
|
||||
|
||||
processor = ProcessInstanceProcessor(process_instance)
|
||||
|
@ -1495,9 +1403,7 @@ class TestProcessApi(BaseTest):
|
|||
assert response.json is not None
|
||||
process_instance_id = response.json["id"]
|
||||
|
||||
process_instance = ProcessInstanceModel.query.filter_by(
|
||||
id=process_instance_id
|
||||
).first()
|
||||
process_instance = ProcessInstanceModel.query.filter_by(id=process_instance_id).first()
|
||||
processor = ProcessInstanceProcessor(process_instance)
|
||||
processor.do_engine_steps(save=True)
|
||||
task = processor.get_all_user_tasks()[0]
|
||||
|
@ -1518,9 +1424,7 @@ class TestProcessApi(BaseTest):
|
|||
f"/v1.0/messages/{message_model_identifier}",
|
||||
content_type="application/json",
|
||||
headers=self.logged_in_headers(with_super_admin_user),
|
||||
data=json.dumps(
|
||||
{"payload": payload, "process_instance_id": process_instance_id}
|
||||
),
|
||||
data=json.dumps({"payload": payload, "process_instance_id": process_instance_id}),
|
||||
)
|
||||
assert response.status_code == 400
|
||||
assert response.json
|
||||
|
@ -1539,9 +1443,7 @@ class TestProcessApi(BaseTest):
|
|||
assert json_data
|
||||
assert json_data["status"] == "complete"
|
||||
process_instance_id = json_data["id"]
|
||||
process_instance = ProcessInstanceModel.query.filter_by(
|
||||
id=process_instance_id
|
||||
).first()
|
||||
process_instance = ProcessInstanceModel.query.filter_by(id=process_instance_id).first()
|
||||
assert process_instance
|
||||
processor = ProcessInstanceProcessor(process_instance)
|
||||
process_instance_data = processor.get_data()
|
||||
|
@ -1553,9 +1455,7 @@ class TestProcessApi(BaseTest):
|
|||
f"/v1.0/messages/{message_model_identifier}",
|
||||
content_type="application/json",
|
||||
headers=self.logged_in_headers(with_super_admin_user),
|
||||
data=json.dumps(
|
||||
{"payload": payload, "process_instance_id": process_instance_id}
|
||||
),
|
||||
data=json.dumps({"payload": payload, "process_instance_id": process_instance_id}),
|
||||
)
|
||||
assert response.status_code == 400
|
||||
assert response.json
|
||||
|
@ -1605,9 +1505,7 @@ class TestProcessApi(BaseTest):
|
|||
assert response.status_code == 200
|
||||
assert response.json is not None
|
||||
|
||||
process_instance = ProcessInstanceModel.query.filter_by(
|
||||
id=process_instance_id
|
||||
).first()
|
||||
process_instance = ProcessInstanceModel.query.filter_by(id=process_instance_id).first()
|
||||
assert process_instance
|
||||
assert process_instance.status == "terminated"
|
||||
|
||||
|
@ -1687,9 +1585,7 @@ class TestProcessApi(BaseTest):
|
|||
assert response.json["next_task"] is not None
|
||||
|
||||
human_tasks = (
|
||||
db.session.query(HumanTaskModel)
|
||||
.filter(HumanTaskModel.process_instance_id == process_instance_id)
|
||||
.all()
|
||||
db.session.query(HumanTaskModel).filter(HumanTaskModel.process_instance_id == process_instance_id).all()
|
||||
)
|
||||
assert len(human_tasks) == 1
|
||||
human_task = human_tasks[0]
|
||||
|
@ -1699,10 +1595,7 @@ class TestProcessApi(BaseTest):
|
|||
)
|
||||
assert response.status_code == 200
|
||||
assert response.json is not None
|
||||
assert (
|
||||
response.json["form_schema"]["definitions"]["Color"]["anyOf"][1]["title"]
|
||||
== "Green"
|
||||
)
|
||||
assert response.json["form_schema"]["definitions"]["Color"]["anyOf"][1]["title"] == "Green"
|
||||
|
||||
# if you set this in task data:
|
||||
# form_ui_hidden_fields = ["veryImportantFieldButOnlySometimes", "building.floor"]
|
||||
|
@ -1732,9 +1625,7 @@ class TestProcessApi(BaseTest):
|
|||
)
|
||||
|
||||
headers = self.logged_in_headers(with_super_admin_user)
|
||||
self.create_process_instance_from_process_model_id_with_api(
|
||||
client, process_model_identifier, headers
|
||||
)
|
||||
self.create_process_instance_from_process_model_id_with_api(client, process_model_identifier, headers)
|
||||
|
||||
response = client.get(
|
||||
"/v1.0/process-instances",
|
||||
|
@ -1749,10 +1640,7 @@ class TestProcessApi(BaseTest):
|
|||
|
||||
process_instance_dict = response.json["results"][0]
|
||||
assert type(process_instance_dict["id"]) is int
|
||||
assert (
|
||||
process_instance_dict["process_model_identifier"]
|
||||
== process_model_identifier
|
||||
)
|
||||
assert process_instance_dict["process_model_identifier"] == process_model_identifier
|
||||
assert type(process_instance_dict["start_in_seconds"]) is int
|
||||
assert process_instance_dict["start_in_seconds"] > 0
|
||||
assert process_instance_dict["end_in_seconds"] is None
|
||||
|
@ -1779,21 +1667,11 @@ class TestProcessApi(BaseTest):
|
|||
bpmn_file_location=bpmn_file_location,
|
||||
)
|
||||
headers = self.logged_in_headers(with_super_admin_user)
|
||||
self.create_process_instance_from_process_model_id_with_api(
|
||||
client, process_model_identifier, headers
|
||||
)
|
||||
self.create_process_instance_from_process_model_id_with_api(
|
||||
client, process_model_identifier, headers
|
||||
)
|
||||
self.create_process_instance_from_process_model_id_with_api(
|
||||
client, process_model_identifier, headers
|
||||
)
|
||||
self.create_process_instance_from_process_model_id_with_api(
|
||||
client, process_model_identifier, headers
|
||||
)
|
||||
self.create_process_instance_from_process_model_id_with_api(
|
||||
client, process_model_identifier, headers
|
||||
)
|
||||
self.create_process_instance_from_process_model_id_with_api(client, process_model_identifier, headers)
|
||||
self.create_process_instance_from_process_model_id_with_api(client, process_model_identifier, headers)
|
||||
self.create_process_instance_from_process_model_id_with_api(client, process_model_identifier, headers)
|
||||
self.create_process_instance_from_process_model_id_with_api(client, process_model_identifier, headers)
|
||||
self.create_process_instance_from_process_model_id_with_api(client, process_model_identifier, headers)
|
||||
|
||||
response = client.get(
|
||||
"/v1.0/process-instances?per_page=2&page=3",
|
||||
|
@@ -2095,9 +1973,7 @@ class TestProcessApi(BaseTest):
    ) -> Any:
        """Setup_testing_instance."""
        headers = self.logged_in_headers(with_super_admin_user)
        response = self.create_process_instance_from_process_model_id_with_api(
            client, process_model_id, headers
        )
        response = self.create_process_instance_from_process_model_id_with_api(client, process_model_id, headers)
        process_instance = response.json
        assert isinstance(process_instance, dict)
        process_instance_id = process_instance["id"]
@@ -2124,15 +2000,9 @@ class TestProcessApi(BaseTest):
            bpmn_file_location=bpmn_file_location,
        )

        process_instance_id = self.setup_testing_instance(
            client, process_model_identifier, with_super_admin_user
        )
        process_instance_id = self.setup_testing_instance(client, process_model_identifier, with_super_admin_user)

        process = (
            db.session.query(ProcessInstanceModel)
            .filter(ProcessInstanceModel.id == process_instance_id)
            .first()
        )
        process = db.session.query(ProcessInstanceModel).filter(ProcessInstanceModel.id == process_instance_id).first()
        assert process is not None
        assert process.status == "not_started"

@@ -2144,16 +2014,9 @@ class TestProcessApi(BaseTest):

        api_error = json.loads(response.get_data(as_text=True))
        assert api_error["error_code"] == "task_error"
        assert (
            'TypeError:can only concatenate str (not "int") to str'
            in api_error["message"]
        )
        assert 'TypeError:can only concatenate str (not "int") to str' in api_error["message"]

        process = (
            db.session.query(ProcessInstanceModel)
            .filter(ProcessInstanceModel.id == process_instance_id)
            .first()
        )
        process = db.session.query(ProcessInstanceModel).filter(ProcessInstanceModel.id == process_instance_id).first()
        assert process is not None
        assert process.status == "error"

@@ -2178,20 +2041,14 @@ class TestProcessApi(BaseTest):
            bpmn_file_location=bpmn_file_location,
        )

        process_instance_id = self.setup_testing_instance(
            client, process_model_identifier, with_super_admin_user
        )
        process_instance_id = self.setup_testing_instance(client, process_model_identifier, with_super_admin_user)
        process_model = ProcessModelService.get_process_model(process_model_identifier)
        ProcessModelService.update_process_model(
            process_model,
            {"fault_or_suspend_on_exception": NotificationType.suspend.value},
        )

        process = (
            db.session.query(ProcessInstanceModel)
            .filter(ProcessInstanceModel.id == process_instance_id)
            .first()
        )
        process = db.session.query(ProcessInstanceModel).filter(ProcessInstanceModel.id == process_instance_id).first()
        assert process is not None
        assert process.status == "not_started"

@@ -2201,11 +2058,7 @@ class TestProcessApi(BaseTest):
        )
        assert response.status_code == 400

        process = (
            db.session.query(ProcessInstanceModel)
            .filter(ProcessInstanceModel.id == process_instance_id)
            .first()
        )
        process = db.session.query(ProcessInstanceModel).filter(ProcessInstanceModel.id == process_instance_id).first()
        assert process is not None
        assert process.status == "suspended"

@@ -2238,9 +2091,7 @@ class TestProcessApi(BaseTest):
        assert response.status_code == 400
        assert process_instance.status == "error"
        processor = ProcessInstanceProcessor(process_instance)
        spiff_task = processor.get_task_by_bpmn_identifier(
            "script_task_two", processor.bpmn_process_instance
        )
        spiff_task = processor.get_task_by_bpmn_identifier("script_task_two", processor.bpmn_process_instance)
        assert spiff_task is not None
        assert spiff_task.data == {"my_var": "THE VAR"}
@@ -2338,13 +2189,8 @@ class TestProcessApi(BaseTest):
        )
        assert response.status_code == 200
        assert response.json is not None
        assert (
            len(response.json["results"]) == 2
        )  # Two messages, one is the completed receive, the other is new send
        assert (
            response.json["results"][0]["process_instance_id"]
            == process_instance_id_one
        )
        assert len(response.json["results"]) == 2  # Two messages, one is the completed receive, the other is new send
        assert response.json["results"][0]["process_instance_id"] == process_instance_id_one

        response = client.get(
            f"/v1.0/messages?process_instance_id={process_instance_id_two}",
@@ -2353,10 +2199,7 @@ class TestProcessApi(BaseTest):
        assert response.status_code == 200
        assert response.json is not None
        assert len(response.json["results"]) == 2
        assert (
            response.json["results"][0]["process_instance_id"]
            == process_instance_id_two
        )
        assert response.json["results"][0]["process_instance_id"] == process_instance_id_two

        response = client.get(
            "/v1.0/messages",
@@ -2604,9 +2447,7 @@ class TestProcessApi(BaseTest):
            bpmn_file_location=bpmn_file_location,
        )

        bpmn_file_data_bytes = self.get_test_data_file_contents(
            bpmn_file_name, bpmn_file_location
        )
        bpmn_file_data_bytes = self.get_test_data_file_contents(bpmn_file_name, bpmn_file_location)
        self.create_spec_file(
            client=client,
            process_model_id=process_model_identifier,
@@ -2628,27 +2469,21 @@ class TestProcessApi(BaseTest):
            headers=self.logged_in_headers(with_super_admin_user),
        )

        process_instance = ProcessInstanceService().get_process_instance(
            process_instance_id
        )
        process_instance = ProcessInstanceService().get_process_instance(process_instance_id)
        assert process_instance.status == "user_input_required"

        client.post(
            f"/v1.0/process-instance-suspend/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}",
            headers=self.logged_in_headers(with_super_admin_user),
        )
        process_instance = ProcessInstanceService().get_process_instance(
            process_instance_id
        )
        process_instance = ProcessInstanceService().get_process_instance(process_instance_id)
        assert process_instance.status == "suspended"

        response = client.post(
            f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/run",
            headers=self.logged_in_headers(with_super_admin_user),
        )
        process_instance = ProcessInstanceService().get_process_instance(
            process_instance_id
        )
        process_instance = ProcessInstanceService().get_process_instance(process_instance_id)
        assert process_instance.status == "suspended"
        assert response.status_code == 400

@@ -2657,9 +2492,7 @@ class TestProcessApi(BaseTest):
            headers=self.logged_in_headers(with_super_admin_user),
        )
        assert response.status_code == 200
        process_instance = ProcessInstanceService().get_process_instance(
            process_instance_id
        )
        process_instance = ProcessInstanceService().get_process_instance(process_instance_id)
        assert process_instance.status == "waiting"

    def test_script_unit_test_run(
@@ -2683,9 +2516,7 @@ class TestProcessApi(BaseTest):
            bpmn_file_location=bpmn_file_location,
        )

        bpmn_file_data_bytes = self.get_test_data_file_contents(
            bpmn_file_name, bpmn_file_location
        )
        bpmn_file_data_bytes = self.get_test_data_file_contents(bpmn_file_name, bpmn_file_location)
        self.create_spec_file(
            client=client,
            process_model_id=process_model_identifier,
@@ -2741,9 +2572,7 @@ class TestProcessApi(BaseTest):
            bpmn_file_location=bpmn_file_location,
        )

        bpmn_file_data_bytes = self.get_test_data_file_contents(
            bpmn_file_name, bpmn_file_location
        )
        bpmn_file_data_bytes = self.get_test_data_file_contents(bpmn_file_name, bpmn_file_location)
        self.create_spec_file(
            client=client,
            process_model_id=process_model_identifier,
@@ -2817,9 +2646,7 @@ class TestProcessApi(BaseTest):
            bpmn_file_location=bpmn_file_location,
        )

        bpmn_file_data_bytes = self.get_test_data_file_contents(
            bpmn_file_name, bpmn_file_location
        )
        bpmn_file_data_bytes = self.get_test_data_file_contents(bpmn_file_name, bpmn_file_location)
        self.create_spec_file(
            client=client,
            process_model_id=process_model_identifier,
@@ -2868,16 +2695,12 @@ class TestProcessApi(BaseTest):
        )
        assert response.json["status"] == "suspended"

    def setup_initial_groups_for_move_tests(
        self, client: FlaskClient, with_super_admin_user: UserModel
    ) -> None:
    def setup_initial_groups_for_move_tests(self, client: FlaskClient, with_super_admin_user: UserModel) -> None:
        """Setup_initial_groups_for_move_tests."""
        groups = ["group_a", "group_b", "group_b/group_bb"]
        # setup initial groups
        for group in groups:
            self.create_process_group(
                client, with_super_admin_user, group, display_name=group
            )
            self.create_process_group(client, with_super_admin_user, group, display_name=group)
        # make sure initial groups exist
        for group in groups:
            persisted = ProcessModelService.get_process_group(group)
@@ -2913,9 +2736,7 @@ class TestProcessApi(BaseTest):
        # move model to `group_b/group_bb`
        new_location = "group_b/group_bb"
        new_process_model_path = f"{new_location}/{process_model_id}"
        modified_original_process_model_id = original_process_model_path.replace(
            "/", ":"
        )
        modified_original_process_model_id = original_process_model_path.replace("/", ":")

        response = client.put(
            f"/v1.0/process-models/{modified_original_process_model_id}/move?new_location={new_location}",
@@ -2930,9 +2751,7 @@ class TestProcessApi(BaseTest):
        assert e.value.args[0] == "process_model_not_found"

        # make sure the new model does exist
        new_process_model = ProcessModelService.get_process_model(
            new_process_model_path
        )
        new_process_model = ProcessModelService.get_process_model(new_process_model_path)
        assert new_process_model is not None
        assert new_process_model.id == new_process_model_path

@@ -2950,9 +2769,7 @@ class TestProcessApi(BaseTest):
        sub_group_id = "sub_group"
        original_location = "group_a"
        original_sub_path = f"{original_location}/{sub_group_id}"
        self.create_process_group(
            client, with_super_admin_user, original_sub_path, display_name=sub_group_id
        )
        self.create_process_group(client, with_super_admin_user, original_sub_path, display_name=sub_group_id)
        # make sure original subgroup exists
        persisted = ProcessModelService.get_process_group(original_sub_path)
        assert persisted is not None
@@ -3111,9 +2928,7 @@ class TestProcessApi(BaseTest):
    ) -> None:
        """Test_can_get_process_instance_list_with_report_metadata."""
        process_model = load_test_spec(
            process_model_id=(
                "save_process_instance_metadata/save_process_instance_metadata"
            ),
            process_model_id="save_process_instance_metadata/save_process_instance_metadata",
            bpmn_file_name="save_process_instance_metadata.bpmn",
            process_model_source_directory="save_process_instance_metadata",
        )
@@ -3172,21 +2987,13 @@ class TestProcessApi(BaseTest):
        user_one = self.create_user_with_permission(username="user_one")

        process_model = load_test_spec(
            process_model_id=(
                "save_process_instance_metadata/save_process_instance_metadata"
            ),
            process_model_id="save_process_instance_metadata/save_process_instance_metadata",
            bpmn_file_name="save_process_instance_metadata.bpmn",
            process_model_source_directory="save_process_instance_metadata",
        )
        self.create_process_instance_from_process_model(
            process_model=process_model, user=user_one
        )
        self.create_process_instance_from_process_model(
            process_model=process_model, user=user_one
        )
        self.create_process_instance_from_process_model(
            process_model=process_model, user=with_super_admin_user
        )
        self.create_process_instance_from_process_model(process_model=process_model, user=user_one)
        self.create_process_instance_from_process_model(process_model=process_model, user=user_one)
        self.create_process_instance_from_process_model(process_model=process_model, user=with_super_admin_user)

        dne_report_metadata = {
            "columns": [
@@ -3224,12 +3031,10 @@ class TestProcessApi(BaseTest):
            report_metadata=dne_report_metadata,
            user=user_one,
        )
        process_instance_report_user_one = (
            ProcessInstanceReportModel.create_with_attributes(
                identifier="user_one_report",
                report_metadata=user_one_report_metadata,
                user=user_one,
            )
        )
        process_instance_report_user_one = ProcessInstanceReportModel.create_with_attributes(
            identifier="user_one_report",
            report_metadata=user_one_report_metadata,
            user=user_one,
        )

        response = client.get(
@@ -3239,14 +3044,8 @@ class TestProcessApi(BaseTest):
        assert response.json is not None
        assert response.status_code == 200
        assert len(response.json["results"]) == 2
        assert (
            response.json["results"][0]["process_initiator_username"]
            == user_one.username
        )
        assert (
            response.json["results"][1]["process_initiator_username"]
            == user_one.username
        )
        assert response.json["results"][0]["process_initiator_username"] == user_one.username
        assert response.json["results"][1]["process_initiator_username"] == user_one.username

        response = client.get(
            f"/v1.0/process-instances?report_identifier={process_instance_report_dne.identifier}",
@@ -3265,9 +3064,7 @@ class TestProcessApi(BaseTest):
    ) -> None:
        """Test_can_get_process_instance_list_with_report_metadata."""
        process_model = load_test_spec(
            process_model_id=(
                "save_process_instance_metadata/save_process_instance_metadata"
            ),
            process_model_id="save_process_instance_metadata/save_process_instance_metadata",
            bpmn_file_name="save_process_instance_metadata.bpmn",
            process_model_source_directory="save_process_instance_metadata",
        )
@@ -3317,9 +3114,7 @@ class TestProcessApi(BaseTest):
        with_super_admin_user: UserModel,
    ) -> None:
        """Test_process_instance_list_can_order_by_metadata."""
        self.create_process_group(
            client, with_super_admin_user, "test_group", "test_group"
        )
        self.create_process_group(client, with_super_admin_user, "test_group", "test_group")
        process_model = load_test_spec(
            "test_group/hello_world",
            process_model_source_directory="nested-task-data-structure",
@@ -3333,15 +3128,11 @@ class TestProcessApi(BaseTest):
            },
        )

        process_instance_one = self.create_process_instance_from_process_model(
            process_model
        )
        process_instance_one = self.create_process_instance_from_process_model(process_model)
        processor = ProcessInstanceProcessor(process_instance_one)
        processor.do_engine_steps(save=True)
        assert process_instance_one.status == "complete"
        process_instance_two = self.create_process_instance_from_process_model(
            process_model
        )
        process_instance_two = self.create_process_instance_from_process_model(process_model)
        processor = ProcessInstanceProcessor(process_instance_two)
        processor.do_engine_steps(save=True)
        assert process_instance_two.status == "complete"
@@ -3405,9 +3196,7 @@ class TestProcessApi(BaseTest):
            "test_group/data_object_test",
            process_model_source_directory="data_object_test",
        )
        process_instance_one = self.create_process_instance_from_process_model(
            process_model
        )
        process_instance_one = self.create_process_instance_from_process_model(process_model)
        processor = ProcessInstanceProcessor(process_instance_one)
        processor.do_engine_steps(save=True)
        assert process_instance_one.status == "user_input_required"
@@ -18,21 +18,15 @@ class TestProcessInstancesController(BaseTest):
        with_super_admin_user: UserModel,
    ) -> None:
        """Test_user_search_returns_a_user."""
        user_one = self.create_user_with_permission(
            username="user_one", target_uri="/process-instances/find-by-id/*"
        )
        user_two = self.create_user_with_permission(
            username="user_two", target_uri="/process-instances/find-by-id/*"
        )
        user_one = self.create_user_with_permission(username="user_one", target_uri="/process-instances/find-by-id/*")
        user_two = self.create_user_with_permission(username="user_two", target_uri="/process-instances/find-by-id/*")

        process_model = load_test_spec(
            process_model_id="group/sample",
            bpmn_file_name="sample.bpmn",
            process_model_source_directory="sample",
        )
        process_instance = self.create_process_instance_from_process_model(
            process_model=process_model, user=user_one
        )
        process_instance = self.create_process_instance_from_process_model(process_model=process_model, user=user_one)

        response = client.get(
            f"/v1.0/process-instances/find-by-id/{process_instance.id}",
@@ -32,9 +32,7 @@ class SecretServiceTestHelpers(BaseTest):
        """Add_test_secret."""
        return SecretService().add_secret(self.test_key, self.test_value, user.id)

    def add_test_process(
        self, client: FlaskClient, user: UserModel
    ) -> ProcessModelInfo:
    def add_test_process(self, client: FlaskClient, user: UserModel) -> ProcessModelInfo:
        """Add_test_process."""
        self.create_process_group(
            client,
@@ -42,9 +40,7 @@ class SecretServiceTestHelpers(BaseTest):
            self.test_process_group_id,
            display_name=self.test_process_group_display_name,
        )
        process_model_identifier = (
            f"{self.test_process_group_id}/{self.test_process_model_id}"
        )
        process_model_identifier = f"{self.test_process_group_id}/{self.test_process_model_id}"
        self.create_process_model_with_api(
            client,
            process_model_id=process_model_identifier,
@@ -52,9 +48,7 @@ class SecretServiceTestHelpers(BaseTest):
            process_model_description=self.test_process_model_description,
            user=user,
        )
        process_model_info = ProcessModelService.get_process_model(
            process_model_identifier
        )
        process_model_info = ProcessModelService.get_process_model(process_model_identifier)
        return process_model_info


@@ -124,14 +118,10 @@ class TestSecretService(SecretServiceTestHelpers):
        secret = SecretService.get_secret(self.test_key)
        assert secret
        assert SecretService._decrypt(secret.value) == self.test_value
        SecretService.update_secret(
            self.test_key, "new_secret_value", with_super_admin_user.id
        )
        SecretService.update_secret(self.test_key, "new_secret_value", with_super_admin_user.id)
        new_secret = SecretService.get_secret(self.test_key)
        assert new_secret
        assert (
            SecretService._decrypt(new_secret.value) == "new_secret_value"
        )  # noqa: S105
        assert SecretService._decrypt(new_secret.value) == "new_secret_value"  # noqa: S105

    def test_update_secret_bad_secret_fails(
        self,
@@ -143,9 +133,7 @@ class TestSecretService(SecretServiceTestHelpers):
        """Test_update_secret_bad_secret_fails."""
        secret = self.add_test_secret(with_super_admin_user)
        with pytest.raises(ApiError) as ae:
            SecretService.update_secret(
                secret.key + "x", "some_new_value", with_super_admin_user.id
            )
            SecretService.update_secret(secret.key + "x", "some_new_value", with_super_admin_user.id)
        assert "Resource does not exist" in ae.value.message
        assert ae.value.error_code == "update_secret_error"

@@ -253,9 +241,7 @@ class TestSecretServiceApi(SecretServiceTestHelpers):
        )
        assert response.status_code == 200

        secret_model = SecretModel.query.filter(
            SecretModel.key == self.test_key
        ).first()
        secret_model = SecretModel.query.filter(SecretModel.key == self.test_key).first()
        assert SecretService._decrypt(secret_model.value) == "new_secret_value"

    def test_delete_secret(
Some files were not shown because too many files have changed in this diff.