garbage related to trimming down process instance logging

parent 11f137f8b0
commit 4361a5f8ec
poetry.lock

@@ -1827,25 +1827,22 @@ test = ["pytest"]
 [[package]]
 name = "SpiffWorkflow"
 version = "1.1.7"
-description = ""
+description = "A workflow framework and BPMN/DMN Processor"
 category = "main"
 optional = false
 python-versions = "*"
-develop = false
+develop = true

 [package.dependencies]
 celery = "*"
 configparser = "*"
 dateparser = "*"
 importlib-metadata = "<5.0"
 lxml = "*"
 pytz = "*"

 [package.source]
-type = "git"
-url = "https://github.com/sartography/SpiffWorkflow"
-reference = "main"
-resolved_reference = "76947aa98d81826b88b2eefd05ebae4427b00e02"
+type = "directory"
+url = "../SpiffWorkflow"

 [[package]]
 name = "SQLAlchemy"
@@ -2157,7 +2154,7 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>=
 [metadata]
 lock-version = "1.1"
 python-versions = "^3.9"
-content-hash = "7a3c07a2eef00685adbf44b6e26b740e20fc52bf85e916b6c171b13d4fcc6dc9"
+content-hash = "5288d4bb2291fc469014bda00cf8da705423a050a20d526a8a089df35c746042"

 [metadata.files]
 alabaster = [
pyproject.toml

@@ -28,8 +28,8 @@ flask-migrate = "*"
 flask-restful = "*"
 werkzeug = "*"
 # go back to main once https://github.com/sartography/SpiffWorkflow/pull/241 is merged
-SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"}
-# SpiffWorkflow = {develop = true, path = "/Users/kevin/projects/github/sartography/SpiffWorkflow"}
+# SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"}
+SpiffWorkflow = {develop = true, path = "/Users/kevin/projects/github/sartography/SpiffWorkflow"}
 # SpiffWorkflow = {develop = true, path = "/home/jason/projects/github/sartography/SpiffWorkflow"}
 sentry-sdk = "1.9.0"
 sphinx-autoapi = "^1.8.4"
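The pyproject.toml hunk above is the substance of the dependency change: the pinned git revision of SpiffWorkflow is commented out in favor of an editable install of a local checkout (develop = true), and the poetry.lock hunks earlier are the re-lock that records the new directory source and content hash. A quick standalone way to confirm which SpiffWorkflow installation an environment actually imports after such a switch, shown here only as an illustration and not as anything from the commit:

# Illustrative check only: report which SpiffWorkflow install is active.
from importlib.metadata import PackageNotFoundError, version

import SpiffWorkflow

try:
    print("SpiffWorkflow version:", version("SpiffWorkflow"))
except PackageNotFoundError:
    print("SpiffWorkflow is not installed in this environment")

# For an editable install (develop = true) this path points into the local
# checkout (for example ../SpiffWorkflow) rather than into site-packages.
print("loaded from:", SpiffWorkflow.__file__)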
@@ -845,17 +845,17 @@ def process_instance_task_list(
     process_instance = find_process_instance_by_id_or_raise(process_instance_id)
     processor = ProcessInstanceProcessor(process_instance)

-    spiff_tasks = None
-    if all_tasks:
-        spiff_tasks = processor.bpmn_process_instance.get_tasks(TaskState.ANY_MASK)
-    else:
-        spiff_tasks = processor.get_all_user_tasks()
+    # spiff_tasks = None
+    # if all_tasks:
+    #     spiff_tasks = processor.bpmn_process_instance.get_tasks(TaskState.ANY_MASK)
+    # else:
+    #     spiff_tasks = processor.get_all_user_tasks()

     tasks = []
-    for spiff_task in spiff_tasks:
-        task = ProcessInstanceService.spiff_task_to_api_task(spiff_task)
-        task.data = spiff_task.data
-        tasks.append(task)
+    # for spiff_task in spiff_tasks:
+    #     task = ProcessInstanceService.spiff_task_to_api_task(spiff_task)
+    #     task.data = spiff_task.data
+    #     tasks.append(task)

     return make_response(jsonify(tasks), 200)
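The hunk above stubs out the body of the task-list endpoint: with the iteration commented out, the route now returns an empty list instead of serializing every task and its data. For reference, a rough standalone sketch of the logic that was disabled, assuming only a deserialized SpiffWorkflow BpmnWorkflow; the helper name list_task_data and the use of TaskState.READY in place of the processor's get_all_user_tasks() are illustrative assumptions, not the application's code:

from SpiffWorkflow.task import TaskState


def list_task_data(workflow, all_tasks=False):
    # Roughly the commented-out route body: gather tasks and expose their
    # task data dictionaries.  `workflow` is a SpiffWorkflow BpmnWorkflow.
    if all_tasks:
        spiff_tasks = workflow.get_tasks(TaskState.ANY_MASK)
    else:
        # The route called processor.get_all_user_tasks(); READY tasks
        # stand in for that here purely for illustration.
        spiff_tasks = workflow.get_tasks(TaskState.READY)
    return [{"name": t.task_spec.name, "data": t.data} for t in spiff_tasks]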
@@ -271,6 +271,7 @@ class ProcessInstanceProcessor:
         self.add_user_info_to_process_instance(self.bpmn_process_instance)

         if self.PROCESS_INSTANCE_ID_KEY not in self.bpmn_process_instance.data:

             if not process_instance_model.id:
                 db.session.add(process_instance_model)
                 # If the model is new, and has no id, save it, write it into the process_instance model
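The hunk above only touches the block that persists a brand-new process instance model. As a generic Flask-SQLAlchemy illustration of that pattern, not the project's code, and with the ensure_saved helper invented purely for the example: adding an unsaved model to the session and committing is what assigns the primary key that later code can record.

def ensure_saved(db, model):
    # Illustrative pattern only.  `db` is assumed to be the application's
    # flask_sqlalchemy.SQLAlchemy() instance, as in the hunk above.
    if not model.id:
        # Adding and committing a brand-new model assigns its primary key,
        # so the id exists before it gets written anywhere else.
        db.session.add(model)
        db.session.commit()
    return model.id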
@@ -418,20 +419,20 @@ class ProcessInstanceProcessor:
         """__get_bpmn_process_instance."""
         if process_instance_model.bpmn_json:
             # turn off logging to avoid duplicated spiff logs
-            spiff_logger = logging.getLogger("spiff")
-            original_spiff_logger_log_level = spiff_logger.level
-            spiff_logger.setLevel(logging.WARNING)
+            # spiff_logger = logging.getLogger("spiff")
+            # original_spiff_logger_log_level = spiff_logger.level
+            # spiff_logger.setLevel(logging.WARNING)

-            try:
-                bpmn_process_instance = (
-                    ProcessInstanceProcessor._serializer.deserialize_json(
-                        process_instance_model.bpmn_json
-                    )
+            # try:
+            bpmn_process_instance = (
+                ProcessInstanceProcessor._serializer.deserialize_json(
+                    process_instance_model.bpmn_json
                 )
-            except Exception as err:
-                raise (err)
-            finally:
-                spiff_logger.setLevel(original_spiff_logger_log_level)
+            )
+            # except Exception as err:
+            #     raise (err)
+            # finally:
+            #     spiff_logger.setLevel(original_spiff_logger_log_level)

             bpmn_process_instance.script_engine = (
                 ProcessInstanceProcessor._script_engine
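The hunk above comments out the temporary suppression of the "spiff" logger around deserialization, which appears to be the logging trimming the commit message refers to, leaving the deserialize_json call without a try/finally around it. The pattern being removed is the usual save, set, and restore dance on a logger level; a minimal standalone sketch of it as a context manager, using only the standard library (the quiet_spiff_logger name is an illustrative assumption, not code from the repository):

import logging
from contextlib import contextmanager


@contextmanager
def quiet_spiff_logger(level=logging.WARNING):
    # Temporarily raise the "spiff" logger's threshold so a noisy operation
    # (such as deserializing bpmn_json) does not emit duplicate log lines.
    spiff_logger = logging.getLogger("spiff")
    original_level = spiff_logger.level
    spiff_logger.setLevel(level)
    try:
        yield spiff_logger
    finally:
        # Restore the previous level even if the wrapped operation raises.
        spiff_logger.setLevel(original_level)


# Usage sketch, assuming `serializer` and `bpmn_json` as in the processor:
# with quiet_spiff_logger():
#     bpmn_process_instance = serializer.deserialize_json(bpmn_json)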