Merge branch 'main' into feature/more-secret-secrets
commit 4bbe10be00

@@ -0,0 +1,41 @@
+#!/usr/bin/env bash
+
+function error_handler() {
+  >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
+  exit "$2"
+}
+trap 'error_handler ${LINENO} $?' ERR
+set -o errtrace -o errexit -o nounset -o pipefail
+
+# Function to check if a string matches the file name pattern
+matches_filename_pattern() {
+  local file_name="$1"
+  local comment_line="$2"
+
+  # Remove file extension and capitalize the first letter
+  local expected_comment=$(basename "$file_name" .py)
+
+  expected_comment_with_first_letter_capitalized="${expected_comment^}"
+
+  if grep -Eq "\"\"\"${expected_comment}\.\"\"\"" <<< "$comment_line"; then
+    return 0
+  else
+    if grep -Eq "\"\"\"${expected_comment_with_first_letter_capitalized}\.\"\"\"" <<< "$comment_line"; then
+      return 0
+    else
+      return 1
+    fi
+  fi
+}
+
+# Process each Python file in the "src" and "tests" directories
+for file in $(find src tests -type f -name '*.py'); do
+  # Read the first line of the file
+  read -r first_line < "$file"
+
+  # Check if the first line matches the expected comment pattern
+  if matches_filename_pattern "$file" "$first_line"; then
+    # Remove the comment from the file
+    hot_sed -i '1d' "$file"
+  fi
+done
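
The script above deletes a module's first line when that line is just the file's own name as a docstring, lower-cased or capitalized. hot_sed is not defined in this script; it is presumably a portable sed wrapper provided elsewhere by the repo's shell tooling. A sketch of the effect on a hypothetical file:

    # src/spiffworkflow_backend/models/secret_model.py, before
    """Secret_model."""
    from dataclasses import dataclass

    # after hot_sed -i '1d' deletes the matching first line
    from dataclasses import dataclass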

@@ -0,0 +1,42 @@
+"""empty message

+Revision ID: e4b6bbf83a3e
+Revises: 6aa02463da9c
+Create Date: 2023-05-30 10:17:10.595965
+
+"""
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = 'e4b6bbf83a3e'
+down_revision = '6aa02463da9c'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('process_model_cycle',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('process_model_identifier', sa.String(length=255), nullable=False),
+    sa.Column('cycle_count', sa.Integer(), nullable=True),
+    sa.Column('duration_in_seconds', sa.Integer(), nullable=True),
+    sa.Column('current_cycle', sa.Integer(), nullable=True),
+    sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
+    sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
+    sa.PrimaryKeyConstraint('id')
+    )
+    with op.batch_alter_table('process_model_cycle', schema=None) as batch_op:
+        batch_op.create_index(batch_op.f('ix_process_model_cycle_process_model_identifier'), ['process_model_identifier'], unique=False)
+
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table('process_model_cycle', schema=None) as batch_op:
+        batch_op.drop_index(batch_op.f('ix_process_model_cycle_process_model_identifier'))
+
+    op.drop_table('process_model_cycle')
+    # ### end Alembic commands ###
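
This revision creates the process_model_cycle table with an index on process_model_identifier. A minimal sketch of applying and rolling it back programmatically, assuming a standard alembic.ini at the backend root:

    from alembic import command
    from alembic.config import Config

    config = Config("alembic.ini")             # assumed config location
    command.upgrade(config, "head")            # runs upgrade() above
    command.downgrade(config, "6aa02463da9c")  # runs downgrade(), back to the prior revision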

@@ -1,4 +1,3 @@
-"""__init__."""
 import base64
 import faulthandler
 import json

@@ -1,4 +1,3 @@
-"""Default."""
 import re
 from os import environ
 

@@ -1,2 +1 @@
-"""Api_version."""
 V1_API_PATH_PREFIX = "/v1.0"

@@ -1,4 +1,3 @@
-"""Db_helper."""
 import time
 
 import sqlalchemy

@@ -1,4 +1,3 @@
-"""Interfaces."""
 from typing import TYPE_CHECKING
 from typing import NewType
 from typing import TypedDict

@@ -76,5 +76,8 @@ from spiffworkflow_backend.models.process_instance_queue import (
 from spiffworkflow_backend.models.active_user import (
     ActiveUserModel,
 )  # noqa: F401
+from spiffworkflow_backend.models.process_model_cycle import (
+    ProcessModelCycleModel,
+)  # noqa: F401
 
 add_listeners()

@@ -1 +0,0 @@
-"""__init__."""

@@ -1,4 +1,3 @@
-"""Db."""
 from __future__ import annotations
 
 import enum

@@ -1,4 +1,3 @@
-"""Group."""
 from __future__ import annotations
 
 from typing import TYPE_CHECKING

@@ -1,4 +1,3 @@
-"""Human_task."""
 from __future__ import annotations
 
 from dataclasses import dataclass

@@ -1,4 +1,3 @@
-"""Human_task_user."""
 from __future__ import annotations
 
 from dataclasses import dataclass

@@ -1,4 +1,3 @@
-"""Message_instance."""
 import enum
 from dataclasses import dataclass
 from typing import TYPE_CHECKING

@@ -1,4 +1,3 @@
-"""Principal."""
 from dataclasses import dataclass
 
 from sqlalchemy import ForeignKey

@@ -1,4 +1,3 @@
-"""Process_group."""
 from __future__ import annotations
 
 import dataclasses

@@ -1,4 +1,3 @@
-"""Process_instance."""
 from __future__ import annotations
 
 from typing import Any

@@ -1,4 +1,3 @@
-"""Process_instance_file_data."""
 from dataclasses import dataclass
 
 from sqlalchemy import ForeignKey

@@ -1,4 +1,3 @@
-"""Process_instance_queue."""
 from dataclasses import dataclass
 
 from sqlalchemy import ForeignKey

@@ -1,4 +1,3 @@
-"""Process_model."""
 from __future__ import annotations
 
 import enum

@@ -0,0 +1,19 @@
+from dataclasses import dataclass
+
+from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
+from spiffworkflow_backend.models.db import db
+
+
+@dataclass
+class ProcessModelCycleModel(SpiffworkflowBaseDBModel):
+    """ProcessInstanceQueueModel."""
+
+    __tablename__ = "process_model_cycle"
+
+    id: int = db.Column(db.Integer, primary_key=True)
+    process_model_identifier: str = db.Column(db.String(255), nullable=False, index=True)
+    cycle_count: int = db.Column(db.Integer)
+    duration_in_seconds: int = db.Column(db.Integer)
+    current_cycle: int = db.Column(db.Integer)
+    updated_at_in_seconds: int = db.Column(db.Integer)
+    created_at_in_seconds: int = db.Column(db.Integer)
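
Note that the class docstring still reads "ProcessInstanceQueueModel", apparently copied from another model. A minimal usage sketch, assuming an active Flask app context so the db session is available:

    from spiffworkflow_backend.models.db import db
    from spiffworkflow_backend.models.process_model_cycle import ProcessModelCycleModel

    # hypothetical identifier: run "misc/cycle-model" 3 times, 30 seconds apart
    cycle = ProcessModelCycleModel(
        process_model_identifier="misc/cycle-model",
        cycle_count=3,
        duration_in_seconds=30,
        current_cycle=0,
    )
    db.session.add(cycle)
    db.session.commit()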

@@ -1,4 +1,3 @@
-"""Refresh_token."""
 from dataclasses import dataclass
 
 from sqlalchemy import ForeignKey

@@ -1,4 +1,3 @@
-"""Secret_model."""
 from dataclasses import dataclass
 from typing import Any
 

@@ -1,4 +1,3 @@
-"""Task."""
 import enum
 from dataclasses import dataclass
 from typing import TYPE_CHECKING

@@ -1,4 +1,3 @@
-"""User."""
 from __future__ import annotations
 
 from dataclasses import dataclass

@@ -1 +0,0 @@
-"""__init__."""

@@ -1 +0,0 @@
-"""__init__."""

@@ -1,4 +1,3 @@
-"""User."""
 import ast
 import base64
 import json

@@ -1,4 +1,3 @@
-"""Users_controller."""
 from typing import Any
 
 import flask

@@ -1,4 +1,3 @@
-"""Delete_process_instances_with_criteria."""
 from time import time
 from typing import Any
 

@@ -1,4 +1,3 @@
-"""Fact_service."""
 from typing import Any
 
 from spiffworkflow_backend.models.script_attributes_context import ScriptAttributesContext

@@ -1,4 +1,3 @@
-"""Get_data_sizes."""
 from typing import Any
 
 from spiffworkflow_backend.models.script_attributes_context import ScriptAttributesContext

@@ -1,4 +1,3 @@
-"""Get_encoded_file_data."""
 import base64
 from typing import Any
 

@@ -1,4 +1,3 @@
-"""Get_env."""
 from typing import Any
 
 from spiffworkflow_backend.models.script_attributes_context import ScriptAttributesContext

@@ -1,4 +1,3 @@
-"""Get_localtime."""
 from datetime import datetime
 from typing import Any
 

@@ -1,4 +1,3 @@
-"""Get_secret."""
 from typing import Any
 
 from spiffworkflow_backend.models.script_attributes_context import ScriptAttributesContext

@@ -1,4 +1,3 @@
-"""Markdown_file_download_link."""
 from typing import Any
 
 from flask import current_app

@@ -1,4 +1,3 @@
-"""Script."""
 from __future__ import annotations
 
 import importlib

@@ -1,4 +1,3 @@
-"""Acceptance_test_fixtures."""
 import time
 
 from flask import current_app

@@ -1,4 +1,3 @@
-"""Assertion_service."""
 import contextlib
 from collections.abc import Generator
 

@@ -1,4 +1,3 @@
-"""Background_processing_service."""
 import flask
 from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
 from spiffworkflow_backend.services.message_service import MessageService

@@ -1,4 +1,3 @@
-"""Custom_parser."""
 from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser  # type: ignore
 from SpiffWorkflow.spiff.parser.process import SpiffBpmnParser  # type: ignore
 from spiffworkflow_backend.specs.start_event import StartEvent

@@ -1,4 +1,3 @@
-"""Data_setup_service."""
 from flask import current_app
 from spiffworkflow_backend.models.db import db
 from spiffworkflow_backend.services.process_model_service import ProcessModelService

@@ -1,5 +1,3 @@
-"""Email_service."""
-
 from flask import current_app
 from flask_mail import Message  # type: ignore
 

@@ -1,4 +1,3 @@
-"""File_system_service."""
 import os
 from collections.abc import Generator
 from contextlib import contextmanager

@@ -1,4 +1,3 @@
-"""Git_service."""
 import os
 import re
 import shutil

@@ -1,4 +1,3 @@
-"""Logging_service."""
 import json
 import logging
 import re

@@ -1,5 +1,3 @@
-"""Message_service."""
-
 from spiffworkflow_backend.models.db import db
 from spiffworkflow_backend.models.message_instance import MessageInstanceModel
 from spiffworkflow_backend.models.message_instance import MessageStatuses

@@ -1,4 +1,3 @@
-"""Process_instance_processor."""
 # TODO: clean up this service for a clear distinction between it and the process_instance_service
 # where this points to the pi service
 import decimal

@@ -1,4 +1,3 @@
-"""Process_instance_report_service."""
 import copy
 import re
 from collections.abc import Generator

@@ -1,4 +1,3 @@
-"""Process_instance_service."""
 import base64
 import hashlib
 import time

@@ -22,6 +21,7 @@ from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
 from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
 from spiffworkflow_backend.models.process_instance_file_data import ProcessInstanceFileDataModel
 from spiffworkflow_backend.models.process_model import ProcessModelInfo
+from spiffworkflow_backend.models.process_model_cycle import ProcessModelCycleModel
 from spiffworkflow_backend.models.task import Task
 from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.services.authorization_service import AuthorizationService

@@ -34,6 +34,7 @@ from spiffworkflow_backend.services.process_instance_queue_service import Proces
 from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceQueueService
 from spiffworkflow_backend.services.process_model_service import ProcessModelService
 from spiffworkflow_backend.services.workflow_service import WorkflowService
+from spiffworkflow_backend.specs.start_event import StartConfiguration
 
 
 class ProcessInstanceService:

@@ -43,22 +44,27 @@ class ProcessInstanceService:
     TASK_STATE_LOCKED = "locked"
 
     @staticmethod
-    def calculate_start_delay_in_seconds(process_instance_model: ProcessInstanceModel) -> int:
+    def next_start_event_configuration(process_instance_model: ProcessInstanceModel) -> StartConfiguration:
         try:
             processor = ProcessInstanceProcessor(process_instance_model)
-            delay_in_seconds = WorkflowService.calculate_run_at_delay_in_seconds(
+            start_configuration = WorkflowService.next_start_event_configuration(
                 processor.bpmn_process_instance, datetime.now(timezone.utc)
             )
         except Exception:
-            delay_in_seconds = 0
-        return delay_in_seconds
+            start_configuration = None
+
+        if start_configuration is None:
+            start_configuration = (0, 0, 0)
+
+        return start_configuration
 
     @classmethod
     def create_process_instance(
         cls,
         process_model: ProcessModelInfo,
         user: UserModel,
-    ) -> ProcessInstanceModel:
+        start_configuration: StartConfiguration | None = None,
+    ) -> tuple[ProcessInstanceModel, StartConfiguration]:
         """Get_process_instance_from_spec."""
         db.session.commit()
         try:
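
next_start_event_configuration now returns a whole StartConfiguration instead of a bare delay. Reading the (0, 0, 0) fallback together with the unpacking later in this file, the tuple fields are (cycle_count, start_delay_in_seconds, duration_in_seconds). A sketch of consuming it, given some existing process_instance_model:

    cycles, delay_in_seconds, duration_in_seconds = ProcessInstanceService.next_start_event_configuration(
        process_instance_model
    )
    # a plain (non-timer) start event yields (0, 0, 0): start now, no recurrence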

@@ -76,10 +82,13 @@ class ProcessInstanceService:
         )
         db.session.add(process_instance_model)
         db.session.commit()
-        delay_in_seconds = cls.calculate_start_delay_in_seconds(process_instance_model)
+
+        if start_configuration is None:
+            start_configuration = cls.next_start_event_configuration(process_instance_model)
+        _, delay_in_seconds, _ = start_configuration
         run_at_in_seconds = round(time.time()) + delay_in_seconds
         ProcessInstanceQueueService.enqueue_new_process_instance(process_instance_model, run_at_in_seconds)
-        return process_instance_model
+        return (process_instance_model, start_configuration)
 
     @classmethod
     def create_process_instance_from_process_model_identifier(

@@ -89,7 +98,52 @@ class ProcessInstanceService:
     ) -> ProcessInstanceModel:
         """Create_process_instance_from_process_model_identifier."""
         process_model = ProcessModelService.get_process_model(process_model_identifier)
-        return cls.create_process_instance(process_model, user)
+        process_instance_model, (cycle_count, _, duration_in_seconds) = cls.create_process_instance(
+            process_model, user
+        )
+        cls.register_process_model_cycles(process_model_identifier, cycle_count, duration_in_seconds)
+        return process_instance_model
+
+    @classmethod
+    def register_process_model_cycles(
+        cls, process_model_identifier: str, cycle_count: int, duration_in_seconds: int
+    ) -> None:
+        # clean up old cycle record if it exists. even if the given cycle_count is 0 the previous version
+        # of the model could have included a cycle timer start event
+        cycles = ProcessModelCycleModel.query.filter(
+            ProcessModelCycleModel.process_model_identifier == process_model_identifier,
+        ).all()
+
+        for cycle in cycles:
+            db.session.delete(cycle)
+
+        if cycle_count != 0:
+            cycle = ProcessModelCycleModel(
+                process_model_identifier=process_model_identifier,
+                cycle_count=cycle_count,
+                duration_in_seconds=duration_in_seconds,
+                current_cycle=0,
+            )
+            db.session.add(cycle)
+
+        db.session.commit()
+
+    @classmethod
+    def schedule_next_process_model_cycle(cls, process_instance_model: ProcessInstanceModel) -> None:
+        cycle = ProcessModelCycleModel.query.filter(
+            ProcessModelCycleModel.process_model_identifier == process_instance_model.process_model_identifier
+        ).first()
+
+        if cycle is None or cycle.cycle_count == 0:
+            return
+
+        if cycle.cycle_count == -1 or cycle.current_cycle < cycle.cycle_count:
+            process_model = ProcessModelService.get_process_model(process_instance_model.process_model_identifier)
+            start_configuration = (cycle.cycle_count, cycle.duration_in_seconds, cycle.duration_in_seconds)
+            cls.create_process_instance(process_model, process_instance_model.process_initiator, start_configuration)
+            cycle.current_cycle += 1
+            db.session.add(cycle)
+            db.session.commit()
 
     @classmethod
     def waiting_event_can_be_skipped(cls, waiting_event: dict[str, Any], now_in_utc: datetime) -> bool:
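
register_process_model_cycles replaces any previous cycle record for a model before storing the new one, and schedule_next_process_model_cycle re-enqueues a fresh instance while current_cycle is below cycle_count (a cycle_count of -1 appears to mean "repeat indefinitely"). The intended flow, sketched with a hypothetical model whose start event repeats 3 times at 30-second intervals:

    instance = ProcessInstanceService.create_process_instance_from_process_model_identifier(
        "misc/cycle-model", user  # hypothetical identifier and user
    )
    # this records a cycle row (cycle_count=3, duration_in_seconds=30); each time an
    # instance completes, schedule_next_process_model_cycle creates the next instance
    # with start_configuration = (3, 30, 30) until current_cycle reaches 3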

@@ -156,6 +210,8 @@ class ProcessInstanceService:
                 cls.run_process_instance_with_processor(
                     process_instance, status_value=status_value, execution_strategy_name=execution_strategy_name
                 )
+                if process_instance.status == "complete":
+                    cls.schedule_next_process_model_cycle(process_instance)
             except ProcessInstanceIsAlreadyLockedError:
                 continue
             except Exception as e:

@@ -1,4 +1,3 @@
-"""Process_model_service."""
 import json
 import os
 import shutil

@@ -1,5 +1,3 @@
-"""Secret_service."""
-
 from flask import current_app
 from spiffworkflow_backend.exceptions.api_error import ApiError
 from spiffworkflow_backend.models.db import db

@@ -1,4 +1,3 @@
-"""Spec_file_service."""
 import os
 import shutil
 from datetime import datetime

@@ -1,4 +1,3 @@
-"""User_service."""
 from typing import Any
 
 from flask import current_app

@@ -1,9 +1,9 @@
-"""workflow_service."""
 from datetime import datetime
 
 from SpiffWorkflow.bpmn.workflow import BpmnWorkflow  # type: ignore
 from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
 from SpiffWorkflow.task import TaskState
+from spiffworkflow_backend.specs.start_event import StartConfiguration
 from spiffworkflow_backend.specs.start_event import StartEvent
 
 

@@ -15,24 +15,13 @@ class WorkflowService:
         return [t for t in workflow.get_tasks(TaskState.FUTURE) if isinstance(t.task_spec, StartEvent)]
 
     @classmethod
-    def next_start_event_delay_in_seconds(cls, workflow: BpmnWorkflow, now_in_utc: datetime) -> int:
+    def next_start_event_configuration(cls, workflow: BpmnWorkflow, now_in_utc: datetime) -> StartConfiguration | None:
         start_events = cls.future_start_events(workflow)
-        start_delays: list[int] = []
-        for start_event in start_events:
-            start_delay = start_event.task_spec.start_delay_in_seconds(start_event, now_in_utc)
-            start_delays.append(start_delay)
-        start_delays.sort()
-        return start_delays[0] if len(start_delays) > 0 else 0
-
-    @classmethod
-    def calculate_run_at_delay_in_seconds(cls, workflow: BpmnWorkflow, now_in_utc: datetime) -> int:
-        # TODO: for now we are using the first start time because I am not sure how multiple
-        # start events should work. I think the right answer is to take the earliest start
-        # time and have later start events stay FUTURE/WAITING?, then we need to be able
-        # to respect the other start events when enqueue'ing.
-        #
-        # TODO: this method should also expand to include other FUTURE/WAITING timers when
-        # enqueue'ing so that we don't have to check timers every 10 or whatever seconds
-        # right now we assume that this is being called to create a process
-
-        return cls.next_start_event_delay_in_seconds(workflow, now_in_utc)
+        configurations = list(
+            map(
+                lambda start_event: start_event.task_spec.configuration(start_event, now_in_utc),  # type: ignore
+                start_events,
+            )
+        )
+        configurations.sort(key=lambda configuration: configuration[1])  # type: ignore
+        return configurations[0] if len(configurations) > 0 else None
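
Where the old code sorted bare delays, the new code sorts whole configurations by their second element, the start delay, so the soonest start event wins; it returns None when the workflow has no future start events, and the caller in ProcessInstanceService maps None to (0, 0, 0). A toy illustration with made-up tuples:

    configurations = [(0, 300, 0), (3, 30, 30)]  # (cycles, delay, duration)
    configurations.sort(key=lambda configuration: configuration[1])
    assert configurations[0] == (3, 30, 30)  # the 30-second delay sorts first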

@@ -13,8 +13,11 @@ from SpiffWorkflow.bpmn.specs.event_definitions import TimerEventDefinition
 from SpiffWorkflow.spiff.parser.event_parsers import SpiffStartEventParser  # type: ignore
 from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
 
+StartConfiguration = tuple[int, int, int]
+
+# TODO: cycle timers and repeat counts?
 
 
 class StartEvent(DefaultStartEvent):  # type: ignore
     def __init__(self, wf_spec, bpmn_id, event_definition, **kwargs):  # type: ignore
         if isinstance(event_definition, TimerEventDefinition):

@@ -33,27 +36,36 @@ class StartEvent(DefaultStartEvent):  # type: ignore
     def register_parser_class(parser_config: dict[str, Any]) -> None:
         parser_config[full_tag("startEvent")] = (SpiffStartEventParser, StartEvent)
 
-    def start_delay_in_seconds(self, my_task: SpiffTask, now_in_utc: datetime) -> int:
-        script_engine = my_task.workflow.script_engine
-        evaluated_expression = None
-        parsed_duration = None
-
-        if isinstance(self.timer_definition, TimerEventDefinition) and script_engine is not None:
-            evaluated_expression = script_engine.evaluate(my_task, self.timer_definition.expression)
+    def configuration(self, my_task: SpiffTask, now_in_utc: datetime) -> StartConfiguration:
+        evaluated_expression = self.evaluated_timer_expression(my_task)
+        cycles = 0
+        start_delay_in_seconds = 0
+        duration = 0
 
         if evaluated_expression is not None:
             if isinstance(self.timer_definition, TimeDateEventDefinition):
                 parsed_duration = TimerEventDefinition.parse_time_or_duration(evaluated_expression)
                 time_delta = parsed_duration - now_in_utc
-                return time_delta.seconds  # type: ignore
+                start_delay_in_seconds = time_delta.seconds
             elif isinstance(self.timer_definition, DurationTimerEventDefinition):
                 parsed_duration = TimerEventDefinition.parse_iso_duration(evaluated_expression)
                 time_delta = TimerEventDefinition.get_timedelta_from_start(parsed_duration, now_in_utc)
-                return time_delta.seconds  # type: ignore
+                start_delay_in_seconds = time_delta.seconds
             elif isinstance(self.timer_definition, CycleTimerEventDefinition):
-                return 0
+                cycles, start, cycle_duration = TimerEventDefinition.parse_iso_recurring_interval(evaluated_expression)
+                time_delta = start - now_in_utc + cycle_duration
+                start_delay_in_seconds = time_delta.seconds
+                duration = cycle_duration.seconds
 
-        return 0
+        return (cycles, start_delay_in_seconds, duration)
+
+    def evaluated_timer_expression(self, my_task: SpiffTask) -> Any:
+        script_engine = my_task.workflow.script_engine
+        evaluated_expression = None
+
+        if isinstance(self.timer_definition, TimerEventDefinition) and script_engine is not None:
+            evaluated_expression = script_engine.evaluate(my_task, self.timer_definition.expression)
+        return evaluated_expression
 
 
 class StartEventConverter(EventConverter):  # type: ignore
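
For cycle timers, parse_iso_recurring_interval on SpiffWorkflow's TimerEventDefinition is used here as yielding a repeat count, a start datetime, and a cycle duration. A worked example of the arithmetic, assuming a timer expression like "R3/2023-05-30T10:00:00Z/PT30S" evaluated 10 seconds before the start time:

    # cycles = 3 (from the leading R3)
    # start - now_in_utc = 10s and cycle_duration = 30s, so
    # time_delta = start - now_in_utc + cycle_duration = 40s
    # configuration = (3, 40, 30): first run in 40 seconds, then every 30 seconds

One caveat worth noting: timedelta.seconds is only the seconds component of the delta, not total_seconds(), so a delay of a day or more would be truncated by the code above.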

@@ -1 +0,0 @@
-"""__init__."""

@@ -1,4 +1,3 @@
-"""Base_test."""
 import io
 import json
 import os

@@ -1,4 +1,3 @@
-"""Example_data."""
 import glob
 import os
 

@@ -1 +0,0 @@
-"""__init__."""

@@ -1,4 +1,3 @@
-"""Test_authentication."""
 import ast
 import base64
 

@@ -1,4 +1,3 @@
-"""Test_authorization."""
 from tests.spiffworkflow_backend.helpers.base_test import BaseTest
 
 

@@ -1,4 +1,3 @@
-"""Test_logging_service."""
 from uuid import UUID
 
 from flask.app import Flask

@@ -1,4 +1,3 @@
-"""Test_nested_groups."""
 import json
 
 from flask.app import Flask

@@ -1,4 +1,4 @@
-"""Test_secret_service."""
 import json
+
 import pytest
 from flask.app import Flask

@@ -1,4 +1,3 @@
-"""Test_user_blueprint."""
 # TODO: fix these tests for new authing system
 # """Test User Blueprint."""
 # import json

@@ -1,4 +1,3 @@
-"""Test_users_controller."""
 from flask.app import Flask
 from flask.testing import FlaskClient
 from spiffworkflow_backend.models.user import UserModel

@@ -1,4 +1,3 @@
-"""Test_acceptance_test_fixtures."""
 import os
 
 from flask.app import Flask

@@ -1,4 +1,3 @@
-"""Test_file."""
 from datetime import datetime
 
 from spiffworkflow_backend.models.file import File

@@ -1,4 +1,3 @@
-"""Test_message_instance."""
 import pytest
 from flask import Flask
 from flask.testing import FlaskClient

@@ -1,4 +1,3 @@
-"""Test_process_instance_processor."""
 from uuid import UUID
 
 import pytest

@@ -1,4 +1,3 @@
-"""Test_process_instance_queue_service."""
 import time
 from contextlib import suppress
 

@@ -1,4 +1,3 @@
-"""Test_process_instance_report_service."""
 import pytest
 from flask import Flask
 from flask.testing import FlaskClient

@@ -1,4 +1,3 @@
-"""Test_process_model_service."""
 from flask import Flask
 from spiffworkflow_backend.services.process_model_service import ProcessModelService
 

@@ -1,4 +1,3 @@
-"""Test_various_bpmn_constructs."""
 from flask.app import Flask
 from flask.testing import FlaskClient
 from spiffworkflow_backend.services.process_instance_processor import ProcessInstanceProcessor

@@ -1,4 +1,3 @@
-"""Test_workflow_service."""
 from collections.abc import Generator
 from datetime import datetime
 from datetime import timedelta

@@ -85,7 +84,7 @@ class TestWorkflowService(BaseTest):
             """,
             "no_tasks",
         )
-        delay = WorkflowService.calculate_run_at_delay_in_seconds(workflow, now_in_utc)
+        _, delay, _ = WorkflowService.next_start_event_configuration(workflow, now_in_utc)  # type: ignore
         assert delay == 0
 
     def test_run_at_delay_is_30_for_30_second_duration_start_timer_event(self, now_in_utc: datetime) -> None:

@@ -106,7 +105,7 @@ class TestWorkflowService(BaseTest):
             """,
             "Process_aldvgey",
         )
-        delay = WorkflowService.calculate_run_at_delay_in_seconds(workflow, now_in_utc)
+        _, delay, _ = WorkflowService.next_start_event_configuration(workflow, now_in_utc)  # type: ignore
        assert delay == 30
 
     def test_run_at_delay_is_300_if_5_mins_before_date_start_timer_event(

@@ -129,5 +128,7 @@ class TestWorkflowService(BaseTest):
             """,
             "Process_aldvgey",
         )
-        delay = WorkflowService.calculate_run_at_delay_in_seconds(workflow, example_start_datetime_minus_5_mins_in_utc)
+        _, delay, _ = WorkflowService.next_start_event_configuration(
+            workflow, example_start_datetime_minus_5_mins_in_utc
+        )  # type: ignore
         assert delay == 300