Merge remote-tracking branch 'origin/main' into feature/fix_process_instance_rewind

jasquat 2023-03-30 07:51:05 -04:00
commit edc0ea83fa
19 changed files with 1214 additions and 3774 deletions


@@ -16,10 +16,7 @@ jobs:
       fail-fast: false
       matrix:
         include:
-          # FIXME: https://github.com/mysql/mysql-connector-python/pull/86
-          # put back when poetry update protobuf mysql-connector-python updates protobuf
-          # right now mysql is forcing protobuf to version 3
-          # - { python: "3.11", os: "ubuntu-latest", session: "safety" }
+          - { python: "3.11", os: "ubuntu-latest", session: "safety" }
           - { python: "3.11", os: "ubuntu-latest", session: "mypy" }
           - { python: "3.10", os: "ubuntu-latest", session: "mypy" }
           - { python: "3.9", os: "ubuntu-latest", session: "mypy" }
@@ -176,6 +173,19 @@ jobs:
           name: logs-${{matrix.python}}-${{matrix.os}}-${{matrix.database}}
           path: "./log/*.log"
 
+  # burnettk created an account at https://app.snyk.io/org/kevin-jfx
+  # and added his SNYK_TOKEN secret under the spiff-arena repo.
+  snyk:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@master
+      - name: Run Snyk to check for vulnerabilities
+        uses: snyk/actions/python@master
+        with:
+          args: spiffworkflow-backend
+        env:
+          SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }}
+
   run_pre_commit_checks:
     runs-on: ubuntu-latest
     defaults:

.gitignore vendored (1 change)

@@ -1,3 +1,4 @@
 pyrightconfig.json
 .idea/
 t
+.dccache

poetry.lock generated (2831 changes)

File diff suppressed because it is too large.


@@ -13,71 +13,8 @@ classifiers = [
[tool.poetry.dependencies]
python = ">=3.11,<3.12"
click = "^8.0.1"
flask = "2.2.2"
flask-admin = "*"
flask-bcrypt = "*"
flask-cors = "*"
flask-mail = "*"
flask-marshmallow = "*"
flask-migrate = "*"
flask-restful = "*"
werkzeug = "*"
# go back to main once https://github.com/sartography/SpiffWorkflow/pull/241 is merged
SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"}
# SpiffWorkflow = {develop = true, path = "/Users/kevin/projects/github/sartography/SpiffWorkflow"}
# SpiffWorkflow = {develop = true, path = "/home/jason/projects/github/sartography/SpiffWorkflow"}
sentry-sdk = "^1.10"
sphinx-autoapi = "^2.0"
# flask-bpmn = {develop = true, path = "/home/jason/projects/github/sartography/flask-bpmn"}
# flask-bpmn = {develop = true, path = "/Users/kevin/projects/github/sartography/flask-bpmn"}
flask-bpmn = {git = "https://github.com/sartography/flask-bpmn", rev = "main"}
mysql-connector-python = "^8.0.29"
pytest-flask = "^1.2.0"
pytest-flask-sqlalchemy = "^1.1.0"
psycopg2 = "^2.9.3"
typing-extensions = "^4.4.0"
connexion = {extras = [ "swagger-ui",], version = "^2"}
lxml = "^4.9.1"
marshmallow-enum = "^1.5.1"
marshmallow-sqlalchemy = "^0.28.0"
PyJWT = "^2.6.0"
gunicorn = "^20.1.0"
python-keycloak = "^2.5.0"
APScheduler = "^3.9.1"
Jinja2 = "^3.1.2"
RestrictedPython = "^6.0"
Flask-SQLAlchemy = "^3"
# type hinting stuff
# these need to be in the normal (non dev-dependencies) section
# because if not then poetry export won't have them and nox -s mypy --pythons 3.10
# will fail
types-Werkzeug = "^1.0.9"
types-PyYAML = "^6.0.12"
types-Flask = "^1.1.6"
types-requests = "^2.28.6"
types-pytz = "^2022.1.1"
# https://github.com/dropbox/sqlalchemy-stubs/pull/251
# someday get off github
# sqlalchemy-stubs = "^0.4"
# sqlalchemy-stubs = { git = "https://github.com/dropbox/sqlalchemy-stubs.git", rev = "master" }
# sqlalchemy-stubs = {develop = true, path = "/Users/kevin/projects/github/sqlalchemy-stubs"}
# for now use my fork
sqlalchemy-stubs = { git = "https://github.com/burnettk/sqlalchemy-stubs.git", rev = "scoped-session-delete" }
simplejson = "^3.17.6"
[tool.poetry.dev-dependencies]
pytest = "^7.1.2"
coverage = {extras = ["toml"], version = "^6.1"}
safety = "^2.3.1"
mypy = ">=0.961"
typeguard = "^2.13.2"
xdoctest = {extras = ["colors"], version = "^1.0.1"}
sphinx = "^5.0.2"
sphinx-autobuild = ">=2021.3.14"
pre-commit = "^2.20.0"
flake8 = "^4.0.1"
black = ">=21.10b0"
@@ -89,71 +26,9 @@ bandit = "1.7.2"
flake8-bugbear = "^22.10.25"
flake8-docstrings = "^1.6.0"
flake8-rst-docstrings = "^0.2.7"
# flask-sqlalchemy-stubs = "^0.2"
pep8-naming = "^0.13.2"
darglint = "^1.8.1"
reorder-python-imports = "^3.9.0"
pre-commit-hooks = "^4.0.1"
sphinx-click = "^4.3.0"
Pygments = "^2.10.0"
pyupgrade = "^3.1.0"
furo = ">=2021.11.12"
[tool.poetry.scripts]
spiffworkflow-backend = "spiffworkflow_backend.__main__:main"
[tool.poetry.group.dev.dependencies]
tomli = "^2.0.1"
[tool.pytest.ini_options]
# ignore deprecation warnings from various packages that we don't control
filterwarnings = [
# note the use of single quote below to denote "raw" strings in TOML
# kombu/utils/compat.py:82
'ignore:SelectableGroups dict interface is deprecated. Use select.',
# flask_marshmallow/__init__.py:34
# marshmallow_sqlalchemy/convert.py:17
'ignore:distutils Version classes are deprecated. Use packaging.version instead.',
# connexion/spec.py:50
'ignore:Passing a schema to Validator.iter_errors is deprecated and will be removed in a future release',
# connexion/decorators/validation.py:16
'ignore:Accessing jsonschema.draft4_format_checker is deprecated and will be removed in a future release.',
# connexion/apis/flask_api.py:236
"ignore:'_request_ctx_stack' is deprecated and will be removed in Flask 2.3",
"ignore:Setting 'json_encoder' on the app or a blueprint is deprecated and will be removed in Flask 2.3",
"ignore:'JSONEncoder' is deprecated and will be removed in Flask 2.3",
"ignore:'app.json_encoder' is deprecated and will be removed in Flask 2.3"
]
[tool.coverage.paths]
source = ["src", "*/site-packages"]
tests = ["tests", "*/tests"]
[tool.coverage.run]
branch = true
source = ["spiffworkflow_backend", "tests"]
[tool.coverage.report]
show_missing = true
fail_under = 50
[tool.mypy]
strict = true
disallow_any_generics = false
warn_unreachable = true
pretty = true
show_column_numbers = true
show_error_codes = true
show_error_context = true
plugins = "sqlmypy"
# We get 'error: Module has no attribute "set_context"' for sentry-sdk without this option
implicit_reexport = true
# allow for subdirs to NOT require __init__.py
namespace_packages = true
explicit_package_bases = false
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"


@@ -44,6 +44,17 @@ if [[ "${1:-}" == "clean" ]]; then
   # TODO: check to see if the db already exists and we can connect to it. also actually clean it up.
   # start postgres in background with one db
   if [[ "${SPIFFWORKFLOW_BACKEND_DATABASE_TYPE:-}" == "postgres" ]]; then
+    container_name="postgres-spiff"
+    container_regex="^postgres-spiff$"
+    if [[ -n "$(docker ps -qa -f name=$container_regex)" ]]; then
+      echo ":: Found postgres container - $container_name"
+      if [[ -n "$(docker ps -q -f name=$container_regex)" ]]; then
+        echo ":: Stopping running container - $container_name"
+        docker stop $container_name
+      fi
+      echo ":: Removing stopped container - $container_name"
+      docker rm $container_name
+    fi
     if ! docker exec -it postgres-spiff psql -U spiffworkflow_backend spiffworkflow_backend_unit_testing -c "select 1"; then
       docker run --name postgres-spiff -p 5432:5432 -e POSTGRES_PASSWORD=spiffworkflow_backend -e POSTGRES_USER=spiffworkflow_backend -e POSTGRES_DB=spiffworkflow_backend_unit_testing -d postgres
       sleep 4 # classy


@@ -45,8 +45,8 @@ def app() -> Flask:
 def with_db_and_bpmn_file_cleanup() -> None:
     """Do it cleanly!"""
     meta = db.metadata
-    db.session.execute(db.update(BpmnProcessModel, values={"top_level_process_id": None}))
-    db.session.execute(db.update(BpmnProcessModel, values={"direct_parent_process_id": None}))
+    db.session.execute(db.update(BpmnProcessModel).values(top_level_process_id=None))
+    db.session.execute(db.update(BpmnProcessModel).values(direct_parent_process_id=None))
     for table in reversed(meta.sorted_tables):
         db.session.execute(table.delete())
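Note on the hunk above: SQLAlchemy 2.x drops the legacy db.update(Model, values={...}) calling convention; statements are now built first and chained with .values(...). A minimal standalone sketch of the 2.x style follows; the User model and in-memory engine are illustrative, not from this repo.

# Sketch of the SQLAlchemy 2.x update API; model and engine are illustrative.
from sqlalchemy import Column, Integer, String, create_engine, update
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class User(Base):
    __tablename__ = "user"
    id = Column(Integer, primary_key=True)
    name = Column(String(50))

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    # 2.x style: build the statement, then chain .values()
    session.execute(update(User).values(name=None))
    session.commit()

The same statement-building style applies to insert() and delete().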


@@ -26,9 +26,10 @@ fi
 
 # https://stackoverflow.com/a/60579344/6090676
 container_name="keycloak"
-if [[ -n "$(docker ps -qa -f name=$container_name)" ]]; then
+container_regex="^keycloak$"
+if [[ -n "$(docker ps -qa -f name=$container_regex)" ]]; then
   echo ":: Found container - $container_name"
-  if [[ -n "$(docker ps -q -f name=$container_name)" ]]; then
+  if [[ -n "$(docker ps -q -f name=$container_regex)" ]]; then
     echo ":: Stopping running container - $container_name"
     docker stop $container_name
   fi

File diff suppressed because it is too large.


@@ -39,10 +39,16 @@ pytest-flask = "^1.2.0"
 pytest-flask-sqlalchemy = "^1.1.0"
 psycopg2 = "^2.9.3"
 typing-extensions = "^4.4.0"
+# pinned to higher than 65.5.0 because of a vulnerability
+# and to lower than 67 because i didn't feel like addressing
+# new deprecation warnings. we don't need this library explicitly,
+# but at one time it was pulled in by various libs we depend on.
+setuptools = "^65.5.1"
 connexion = {extras = [ "swagger-ui",], version = "^2"}
 lxml = "^4.9.1"
 marshmallow-enum = "^1.5.1"
 marshmallow-sqlalchemy = "^0.28.0"
 PyJWT = "^2.6.0"
 gunicorn = "^20.1.0"
 APScheduler = "*"
@@ -75,7 +81,9 @@ flask-jwt-extended = "^4.4.4"
 pylint = "^2.15.10"
 flask-simple-crypt = "^0.3.3"
 cryptography = "^39.0.2"
 safety = "^2.3.5"
+sqlalchemy = "^2.0.7"
+marshmallow-sqlalchemy = "^0.29.0"
 
 [tool.poetry.dev-dependencies]
 pytest = "^7.1.2"


@@ -44,8 +44,9 @@ class MyJSONEncoder(DefaultJSONProvider):
             return obj.serialized
         elif isinstance(obj, sqlalchemy.engine.row.Row):  # type: ignore
             return_dict = {}
-            for row_key in obj.keys():
-                row_value = obj[row_key]
+            row_mapping = obj._mapping
+            for row_key in row_mapping.keys():
+                row_value = row_mapping[row_key]
                 if hasattr(row_value, "serialized"):
                     return_dict.update(row_value.serialized)
                 elif hasattr(row_value, "__dict__"):
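Note on the hunk above: in SQLAlchemy 2.x a Row behaves like a named tuple, indexed by position, and the old dict-style row.keys() / row[key] access lives behind row._mapping. A small sketch; the query is illustrative, not from this codebase.

# Sketch of Row._mapping access in SQLAlchemy 2.x; the query is illustrative.
from sqlalchemy import create_engine, text

engine = create_engine("sqlite://")
with engine.connect() as conn:
    row = conn.execute(text("select 1 as id, 'a' as name")).first()
    assert row[0] == 1  # positional, tuple-style access still works
    # dict-style access now goes through ._mapping
    for row_key in row._mapping.keys():
        print(row_key, row._mapping[row_key])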


@@ -2,6 +2,7 @@
 import time
 
 import sqlalchemy
+from sqlalchemy.sql import text
 
 from spiffworkflow_backend.models.db import db
@@ -9,7 +10,7 @@ from spiffworkflow_backend.models.db import db
 def try_to_connect(start_time: float) -> None:
     """Try to connect."""
     try:
-        db.first_or_404("select 1")  # type: ignore
+        db.first_or_404(text("select 1"))  # type: ignore
     except sqlalchemy.exc.DatabaseError as exception:
         if time.time() - start_time > 15:
             raise exception
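Note on the hunk above: SQLAlchemy 2.x raises ObjectNotExecutableError when handed a plain SQL string; raw SQL must be wrapped in text(). Flask-SQLAlchemy's first_or_404 hands the statement to the same execution machinery, so the rule applies there too. A sketch in plain SQLAlchemy:

# Sketch: raw SQL strings must be wrapped in text() under SQLAlchemy 2.x.
from sqlalchemy import create_engine, text

engine = create_engine("sqlite://")
with engine.connect() as conn:
    conn.execute(text("select 1"))  # ok
    # conn.execute("select 1")      # raises ObjectNotExecutableError in 2.x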


@@ -53,6 +53,7 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
     """ProcessInstanceModel."""
 
     __tablename__ = "process_instance"
+    __allow_unmapped__ = True
     id: int = db.Column(db.Integer, primary_key=True)
     process_model_identifier: str = db.Column(db.String(255), nullable=False, index=True)
     process_model_display_name: str = db.Column(db.String(255), nullable=False, index=True)
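Note on the hunk above: SQLAlchemy 2.x interprets class-level annotations on declarative models as Mapped[...] declarations and raises at class-definition time for plain annotations like id: int = db.Column(...). Setting __allow_unmapped__ = True opts the class back into the legacy 1.x behavior. An illustrative sketch with a hypothetical model in plain SQLAlchemy:

# Sketch: __allow_unmapped__ lets legacy, non-Mapped annotations coexist with
# SQLAlchemy 2.x declarative models. The model is illustrative.
from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class ProcessInstance(Base):
    __tablename__ = "process_instance"
    # without this, 2.x raises because the annotations below are not Mapped[...]
    __allow_unmapped__ = True

    id: int = Column(Integer, primary_key=True)
    process_model_identifier: str = Column(String(255), nullable=False)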


@@ -8,7 +8,6 @@ from typing import Optional
 from typing import TypedDict
 
 from sqlalchemy import ForeignKey
-from sqlalchemy.orm import deferred
 from sqlalchemy.orm import relationship
 
 from spiffworkflow_backend.exceptions.process_entity_not_found_error import (
@@ -69,7 +68,7 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel):
 
     id: int = db.Column(db.Integer, primary_key=True)
     identifier: str = db.Column(db.String(50), nullable=False, index=True)
-    report_metadata: dict = deferred(db.Column(db.JSON))  # type: ignore
+    report_metadata: dict = db.Column(db.JSON)
     created_by_id = db.Column(ForeignKey(UserModel.id), nullable=False, index=True)  # type: ignore
     created_by = relationship("UserModel")
     created_at_in_seconds = db.Column(db.Integer)
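For context on the deferred() dropped above: a deferred column is left out of the initial SELECT and loaded lazily on first attribute access, which costs an extra query; a plain db.Column loads eagerly with the row. An illustrative sketch with a hypothetical model:

# Sketch: deferred vs. eager column loading; the model is illustrative.
from sqlalchemy import JSON, Column, Integer
from sqlalchemy.orm import declarative_base, deferred

Base = declarative_base()

class Report(Base):
    __tablename__ = "report"
    id = Column(Integer, primary_key=True)
    # deferred: omitted from the initial SELECT, fetched on first access
    report_metadata = deferred(Column(JSON))
    # plain column: always loaded with the row
    status = Column(Integer)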


@@ -47,6 +47,7 @@ class MultiInstanceType(enum.Enum):
 @dataclass
 class TaskModel(SpiffworkflowBaseDBModel):
     __tablename__ = "task"
+    __allow_unmapped__ = True
     id: int = db.Column(db.Integer, primary_key=True)
     guid: str = db.Column(db.String(36), nullable=False, unique=True)
     bpmn_process_id: int = db.Column(ForeignKey(BpmnProcessModel.id), nullable=False, index=True)  # type: ignore


@@ -632,9 +632,10 @@ def process_instance_task_list(
             status_code=400,
         )
 
-    _parent_bpmn_processes, task_models_of_parent_bpmn_processes = (
-        TaskService.task_models_of_parent_bpmn_processes(to_task_model)
-    )
+    (
+        _parent_bpmn_processes,
+        task_models_of_parent_bpmn_processes,
+    ) = TaskService.task_models_of_parent_bpmn_processes(to_task_model)
     task_models_of_parent_bpmn_processes_guids = [p.guid for p in task_models_of_parent_bpmn_processes if p.guid]
     task_model_query = task_model_query.filter(
         or_(


@@ -43,11 +43,10 @@ from SpiffWorkflow.bpmn.specs.events.EndEvent import EndEvent  # type: ignore
 from SpiffWorkflow.bpmn.specs.events.StartEvent import StartEvent  # type: ignore
 from SpiffWorkflow.bpmn.specs.SubWorkflowTask import SubWorkflowTask  # type: ignore
 from SpiffWorkflow.bpmn.workflow import BpmnWorkflow  # type: ignore
-from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser  # type: ignore
-from SpiffWorkflow.dmn.serializer.task_spec import BusinessRuleTaskConverter  # type: ignore
 from SpiffWorkflow.exceptions import WorkflowException  # type: ignore
 from SpiffWorkflow.exceptions import WorkflowTaskException
 from SpiffWorkflow.serializer.exceptions import MissingSpecError  # type: ignore
 from SpiffWorkflow.spiff.parser.process import SpiffBpmnParser  # type: ignore
+from SpiffWorkflow.spiff.serializer.config import SPIFF_SPEC_CONFIG  # type: ignore
 from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
 from SpiffWorkflow.task import TaskState
@@ -110,8 +109,6 @@ from spiffworkflow_backend.services.workflow_execution_service import (
     WorkflowExecutionService,
 )
 
-SPIFF_SPEC_CONFIG["task_specs"].append(BusinessRuleTaskConverter)
-
 # Sorry about all this crap. I wanted to move this thing to another file, but
 # importing a bunch of types causes circular imports.
@@ -1223,14 +1220,17 @@ class ProcessInstanceProcessor:
                 spiff_tasks_updated[task.id] = task
 
         for updated_spiff_task in spiff_tasks_updated.values():
-            bpmn_process, task_model, new_task_models, new_json_data_dicts = (
-                TaskService.find_or_create_task_model_from_spiff_task(
+            (
+                bpmn_process,
+                task_model,
+                new_task_models,
+                new_json_data_dicts,
+            ) = TaskService.find_or_create_task_model_from_spiff_task(
                 updated_spiff_task,
                 self.process_instance_model,
                 self._serializer,
                 bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings,
-                )
             )
             bpmn_process_to_use = bpmn_process or task_model.bpmn_process
             bpmn_process_json_data = TaskService.update_task_data_on_bpmn_process(
                 bpmn_process_to_use, updated_spiff_task.workflow.data
@@ -1439,7 +1439,7 @@ class ProcessInstanceProcessor:
 
     @staticmethod
     def update_spiff_parser_with_all_process_dependency_files(
-        parser: BpmnDmnParser,
+        parser: SpiffBpmnParser,
         processed_identifiers: Optional[set[str]] = None,
     ) -> None:
         """Update_spiff_parser_with_all_process_dependency_files."""


@@ -309,11 +309,14 @@ class ProcessInstanceReportService:
     ) -> list[dict]:
         """Add_metadata_columns_to_process_instance."""
         results = []
-        for process_instance in process_instance_sqlalchemy_rows:
-            process_instance_dict = process_instance["ProcessInstanceModel"].serialized
+        for process_instance_row in process_instance_sqlalchemy_rows:
+            process_instance_mapping = process_instance_row._mapping
+            process_instance_dict = process_instance_row[0].serialized
             for metadata_column in metadata_columns:
                 if metadata_column["accessor"] not in process_instance_dict:
-                    process_instance_dict[metadata_column["accessor"]] = process_instance[metadata_column["accessor"]]
+                    process_instance_dict[metadata_column["accessor"]] = process_instance_mapping[
+                        metadata_column["accessor"]
+                    ]
 
             results.append(process_instance_dict)
         return results


@@ -279,6 +279,7 @@ class ProcessInstanceService:
                     yield (identifier, list_value, list_index)
                 if isinstance(list_value, dict) and len(list_value) == 1:
                     for v in list_value.values():
-                        yield (identifier, v, list_index)
+                        if isinstance(v, str):
+                            yield (identifier, v, list_index)
 
     @classmethod


@@ -1,4 +1,3 @@
-import logging
 import time
 from typing import Callable
 from typing import Optional
@@ -149,14 +148,17 @@ class TaskModelSavingDelegate(EngineStepDelegate):
                 self.json_data_dicts[json_data_dict["hash"]] = json_data_dict
 
     def _update_task_model_with_spiff_task(self, spiff_task: SpiffTask, task_failed: bool = False) -> TaskModel:
-        bpmn_process, task_model, new_task_models, new_json_data_dicts = (
-            TaskService.find_or_create_task_model_from_spiff_task(
+        (
+            bpmn_process,
+            task_model,
+            new_task_models,
+            new_json_data_dicts,
+        ) = TaskService.find_or_create_task_model_from_spiff_task(
             spiff_task,
             self.process_instance,
             self.serializer,
             bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings,
-            )
         )
         bpmn_process_json_data = TaskService.update_task_data_on_bpmn_process(
             bpmn_process or task_model.bpmn_process, spiff_task.workflow.data
         )
@@ -317,10 +319,6 @@ class WorkflowExecutionService:
         finally:
             self.execution_strategy.save()
 
-            spiff_logger = logging.getLogger("spiff")
-            for handler in spiff_logger.handlers:
-                if hasattr(handler, "bulk_insert_logs"):
-                    handler.bulk_insert_logs()  # type: ignore
             db.session.commit()
 
         if save: