merged in main and resolved conflicts w/ burnettk
commit afc13bb751

@@ -1,5 +1,5 @@
"""This is used by bin/codemod/remove_all_unused_functions to remove a function from a file."""
from bowler import Query
from bowler.types import Leaf

# This came about because vulture (actually dead, from the list of Similar programs at https://pypi.org/project/vulture/)
# actually found unused stuff, and I wanted to remove it.
@@ -7,6 +7,8 @@ from bowler.types import Leaf


def remove_function(filename: str, function_name: str) -> None:
    """Does the dirty work of actually removing the function from the file in place, or failing if it cannot."""

    def remove_statement(node, capture, filename):
        node.remove()

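For context on how this helper is driven: below is a minimal sketch of a wrapper along the lines of the remove_all_unused_functions script the docstring mentions, assuming a plain-text input file of path:function_name pairs produced by a dead-code scanner. The input file name and line format are illustrative assumptions, not part of this commit.

# Hypothetical driver for remove_function; the input file name and line format are assumptions.
from remove_function import remove_function

def remove_all_listed(list_path: str) -> None:
    with open(list_path) as handle:
        for line in handle:
            line = line.strip()
            if not line:
                continue
            filename, function_name = line.split(":", 1)
            remove_function(filename, function_name)

if __name__ == "__main__":
    remove_all_listed("unused_functions.txt")
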
@@ -0,0 +1,32 @@
"""empty message

Revision ID: 664bb2f00694
Revises: 0c7428378d6e
Create Date: 2023-04-27 13:32:04.143969

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '664bb2f00694'
down_revision = '0c7428378d6e'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('process_instance', schema=None) as batch_op:
        batch_op.add_column(sa.Column('task_updated_at_in_seconds', sa.Integer(), nullable=True))

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('process_instance', schema=None) as batch_op:
        batch_op.drop_column('task_updated_at_in_seconds')

    # ### end Alembic commands ###

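After this migration is applied, a quick sanity check with SQLAlchemy's inspector can confirm the new column landed on process_instance. This is a sketch only; the connection URL is a placeholder assumption rather than the real backend configuration.

# Sketch: verify the column added by revision 664bb2f00694 exists (connection URL is a placeholder).
import sqlalchemy as sa

engine = sa.create_engine("sqlite:///example.db")
inspector = sa.inspect(engine)
column_names = {column["name"] for column in inspector.get_columns("process_instance")}
assert "task_updated_at_in_seconds" in column_names
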
(File diff suppressed because it is too large.)

@@ -153,3 +153,9 @@ SPIFFWORKFLOW_BACKEND_FEATURE_ELEMENT_UNITS_ENABLED = (
SPIFFWORKFLOW_BACKEND_ELEMENT_UNITS_CACHE_DIR = environ.get(
    "SPIFFWORKFLOW_BACKEND_ELEMENT_UNITS_CACHE_DIR", default=None
)

# adds the ProxyFix to Flask on http by processing the 'X-Forwarded-Proto' header
# to make SpiffWorkflow aware that it should return https for the server urls etc rather than http.
SPIFFWORKFLOW_BACKEND_USE_WERKZEUG_MIDDLEWARE_PROXY_FIX = (
    environ.get("SPIFFWORKFLOW_BACKEND_USE_WERKZEUG_MIDDLEWARE_PROXY_FIX", default="false") == "true"
)

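Note that the flag is an exact string comparison, so only the literal lowercase value "true" turns the middleware on. A small illustration of that parsing; the values set here are throwaway examples, not real configuration.

# Illustration of the string-comparison flag parsing used above; the value set here is just for the example.
from os import environ

environ["SPIFFWORKFLOW_BACKEND_USE_WERKZEUG_MIDDLEWARE_PROXY_FIX"] = "True"  # note the capital T
use_proxy_fix = environ.get("SPIFFWORKFLOW_BACKEND_USE_WERKZEUG_MIDDLEWARE_PROXY_FIX", default="false") == "true"
assert use_proxy_fix is False  # anything other than the lowercase string "true" leaves the middleware off
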
@@ -95,6 +95,7 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):

    start_in_seconds: int | None = db.Column(db.Integer, index=True)
    end_in_seconds: int | None = db.Column(db.Integer, index=True)
    task_updated_at_in_seconds: int = db.Column(db.Integer, nullable=True)
    updated_at_in_seconds: int = db.Column(db.Integer)
    created_at_in_seconds: int = db.Column(db.Integer)
    status: str = db.Column(db.String(50), index=True)
@@ -122,6 +123,7 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
            "bpmn_version_control_identifier": self.bpmn_version_control_identifier,
            "bpmn_version_control_type": self.bpmn_version_control_type,
            "process_initiator_username": self.process_initiator.username,
            "task_updated_at_in_seconds": self.task_updated_at_in_seconds,
        }

    def serialized_with_metadata(self) -> dict[str, Any]:

@@ -93,7 +93,7 @@ class ProcessInstanceReportService:
            {"Header": "Task", "accessor": "task_title"},
            {"Header": "Waiting For", "accessor": "waiting_for"},
            {"Header": "Started", "accessor": "start_in_seconds"},
            {"Header": "Last Updated", "accessor": "updated_at_in_seconds"},
            {"Header": "Last Updated", "accessor": "task_updated_at_in_seconds"},
            {"Header": "status", "accessor": "status"},
        ],
        "filter_by": [
@@ -116,7 +116,7 @@ class ProcessInstanceReportService:
            {"Header": "Task", "accessor": "task_title"},
            {"Header": "Started By", "accessor": "process_initiator_username"},
            {"Header": "Started", "accessor": "start_in_seconds"},
            {"Header": "Last Updated", "accessor": "updated_at_in_seconds"},
            {"Header": "Last Updated", "accessor": "task_updated_at_in_seconds"},
        ],
        "filter_by": [
            {"field_name": "with_tasks_i_can_complete", "field_value": True},
@@ -138,7 +138,7 @@ class ProcessInstanceReportService:
            {"Header": "Task", "accessor": "task_title"},
            {"Header": "Started By", "accessor": "process_initiator_username"},
            {"Header": "Started", "accessor": "start_in_seconds"},
            {"Header": "Last Updated", "accessor": "updated_at_in_seconds"},
            {"Header": "Last Updated", "accessor": "task_updated_at_in_seconds"},
        ],
        "filter_by": [
            {"field_name": "process_status", "field_value": active_status_values},

@@ -2,7 +2,10 @@
import base64
import hashlib
import time
from datetime import datetime
from datetime import timezone
from typing import Any
from typing import Dict
from typing import Generator
from typing import List
from typing import Optional
@@ -12,6 +15,7 @@ from urllib.parse import unquote
import sentry_sdk
from flask import current_app
from flask import g
from SpiffWorkflow.bpmn.specs.events.event_definitions import TimerEventDefinition  # type: ignore
from SpiffWorkflow.bpmn.specs.events.IntermediateEvent import _BoundaryEventParent  # type: ignore
from SpiffWorkflow.task import Task as SpiffTask  # type: ignore

@@ -86,6 +90,29 @@ class ProcessInstanceService:
        process_model = ProcessModelService.get_process_model(process_model_identifier)
        return cls.create_process_instance(process_model, user)

    @classmethod
    def waiting_event_can_be_skipped(cls, waiting_event: Dict[str, Any], now_in_utc: datetime) -> bool:
        #
        # over time this function can gain more knowledge of different event types,
        # for now we are just handling Duration Timer events.
        #
        # example: {'event_type': 'Duration Timer', 'name': None, 'value': '2023-04-27T20:15:10.626656+00:00'}
        #
        event_type = waiting_event.get("event_type")
        if event_type == "Duration Timer":
            event_value = waiting_event.get("value")
            if event_value is not None:
                event_datetime = TimerEventDefinition.get_datetime(event_value)
                return event_datetime > now_in_utc  # type: ignore
        return False

    @classmethod
    def all_waiting_events_can_be_skipped(cls, waiting_events: List[Dict[str, Any]]) -> bool:
        for waiting_event in waiting_events:
            if not cls.waiting_event_can_be_skipped(waiting_event, datetime.now(timezone.utc)):
                return False
        return True

    @classmethod
    def ready_user_task_has_associated_timer(cls, processor: ProcessInstanceProcessor) -> bool:
        for ready_user_task in processor.bpmn_process_instance.get_ready_user_tasks():
@@ -101,7 +128,10 @@ class ProcessInstanceService:
        if processor.process_instance_model.status != status_value:
            return True

        return status_value == "user_input_required" and not cls.ready_user_task_has_associated_timer(processor)
        if status_value == "user_input_required" and cls.ready_user_task_has_associated_timer(processor):
            return cls.all_waiting_events_can_be_skipped(processor.bpmn_process_instance.waiting_events())

        return False

    @classmethod
    def do_waiting(cls, status_value: str = ProcessInstanceStatus.waiting.value) -> None:

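The two new class methods above depend only on their arguments (plus the current time), so they can be exercised without a running process instance. A minimal sketch of the composite check follows, assuming ProcessInstanceService is imported as in the tests added later in this commit; the event payloads follow the shape documented in the code comment, with made-up values.

# Sketch: all_waiting_events_can_be_skipped returns False as soon as one event cannot be skipped.
waiting_events = [
    {"event_type": "Duration Timer", "name": None, "value": "2099-01-01T00:00:00+00:00"},  # future timer: skippable
    {"event_type": "Unknown", "name": None, "value": None},  # unknown event type: never skippable
]
assert ProcessInstanceService.all_waiting_events_can_be_skipped(waiting_events) is False
assert ProcessInstanceService.all_waiting_events_can_be_skipped(waiting_events[:1]) is True
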
@@ -157,6 +157,7 @@ class TaskModelSavingDelegate(EngineStepDelegate):
        # # self._add_parents(spiff_task)

        self.last_completed_spiff_task = spiff_task
        self.process_instance.task_updated_at_in_seconds = round(time.time())
        if self.secondary_engine_step_delegate:
            self.secondary_engine_step_delegate.did_complete_task(spiff_task)

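The timestamp written above is plain integer epoch seconds, matching the other *_in_seconds columns, which is what lets the frontend table further down in this commit hand the value to its existing formatters unchanged. A small illustrative conversion, purely as an example of how such a timestamp reads as a UTC datetime:

# Illustration: task_updated_at_in_seconds is an integer epoch timestamp, as produced by round(time.time()) above.
import time
from datetime import datetime, timezone

task_updated_at_in_seconds = round(time.time())
# Equivalent human-readable form of what the frontend renders as a local time or "time ago":
print(datetime.fromtimestamp(task_updated_at_in_seconds, tz=timezone.utc).isoformat())
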
@@ -1,4 +1,6 @@
"""Test_process_instance_processor."""
from datetime import datetime
from datetime import timezone
from typing import Optional

from flask.app import Flask
@@ -213,3 +215,33 @@ class TestProcessInstanceService(BaseTest):
        assert len(models) == 2
        self._check_sample_file_data_model("File", 0, models[0])
        self._check_sample_file_data_model("File", 1, models[1])

    def test_does_not_skip_events_it_does_not_know_about(self) -> None:
        assert not (
            ProcessInstanceService.waiting_event_can_be_skipped(
                {"event_type": "Unknown", "name": None, "value": "2023-04-27T20:15:10.626656+00:00"},
                datetime.now(timezone.utc),
            )
        )

    def test_does_skip_duration_timer_events_for_the_future(self) -> None:
        assert ProcessInstanceService.waiting_event_can_be_skipped(
            {"event_type": "Duration Timer", "name": None, "value": "2023-04-27T20:15:10.626656+00:00"},
            datetime.fromisoformat("2023-04-26T20:15:10.626656+00:00"),
        )

    def test_does_not_skip_duration_timer_events_for_the_past(self) -> None:
        assert not (
            ProcessInstanceService.waiting_event_can_be_skipped(
                {"event_type": "Duration Timer", "name": None, "value": "2023-04-27T20:15:10.626656+00:00"},
                datetime.fromisoformat("2023-04-28T20:15:10.626656+00:00"),
            )
        )

    def test_does_not_skip_duration_timer_events_for_now(self) -> None:
        assert not (
            ProcessInstanceService.waiting_event_can_be_skipped(
                {"event_type": "Duration Timer", "name": None, "value": "2023-04-27T20:15:10.626656+00:00"},
                datetime.fromisoformat("2023-04-27T20:15:10.626656+00:00"),
            )
        )

@@ -8,6 +8,11 @@ from spiffworkflow_backend.services.acceptance_test_fixtures import (

app = create_app()

if app.config["SPIFFWORKFLOW_BACKEND_USE_WERKZEUG_MIDDLEWARE_PROXY_FIX"]:
    from werkzeug.middleware.proxy_fix import ProxyFix

    app.wsgi_app = ProxyFix(app.wsgi_app, x_proto=1)

# this is in here because when we put it in the create_app function,
# it also loaded when we were running migrations, which resulted in a chicken/egg thing.
if os.environ.get("SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA") == "true":

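To see what x_proto=1 buys here: ProxyFix trusts one X-Forwarded-Proto hop and rewrites the WSGI scheme, so Flask builds https URLs when the reverse proxy reports the original request was https. A self-contained sketch with a throwaway Flask app (not the spiffworkflow one):

# Sketch: ProxyFix with x_proto=1 honors a single X-Forwarded-Proto header and rewrites the request scheme.
from flask import Flask, request
from werkzeug.middleware.proxy_fix import ProxyFix

demo = Flask(__name__)
demo.wsgi_app = ProxyFix(demo.wsgi_app, x_proto=1)

@demo.route("/scheme")
def scheme() -> str:
    return request.scheme

with demo.test_client() as client:
    assert client.get("/scheme", headers={"X-Forwarded-Proto": "https"}).data == b"https"
    assert client.get("/scheme").data == b"http"
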
@@ -1521,6 +1521,7 @@ export default function ProcessInstanceListTable({
      start_in_seconds: formatSecondsForDisplay,
      end_in_seconds: formatSecondsForDisplay,
      updated_at_in_seconds: formatSecondsForDisplay,
      task_updated_at_in_seconds: formatSecondsForDisplay,
    };
    const formatter =
      reportColumnFormatters[column.accessor] ?? defaultFormatter;
@@ -1546,6 +1547,13 @@ export default function ProcessInstanceListTable({
        />
      );
    }
    if (column.accessor === 'task_updated_at_in_seconds') {
      return (
        <TableCellWithTimeAgoInWords
          timeInSeconds={row.task_updated_at_in_seconds}
        />
      );
    }
    return (
      // eslint-disable-next-line jsx-a11y/no-noninteractive-element-interactions
      <td data-qa={`process-instance-show-link-${column.accessor}`}>