Merge branch 'main' of github.com:sartography/spiff-arena
Commit: 8befc5092f (sartography/spiff-arena)
spiffworkflow-backend/migrations/versions/e4b6bbf83a3e_.py (new file, 42 lines)
@@ -0,0 +1,42 @@
+"""empty message
+
+Revision ID: e4b6bbf83a3e
+Revises: 6aa02463da9c
+Create Date: 2023-05-30 10:17:10.595965
+
+"""
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = 'e4b6bbf83a3e'
+down_revision = '6aa02463da9c'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('process_model_cycle',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('process_model_identifier', sa.String(length=255), nullable=False),
+    sa.Column('cycle_count', sa.Integer(), nullable=True),
+    sa.Column('duration_in_seconds', sa.Integer(), nullable=True),
+    sa.Column('current_cycle', sa.Integer(), nullable=True),
+    sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
+    sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
+    sa.PrimaryKeyConstraint('id')
+    )
+    with op.batch_alter_table('process_model_cycle', schema=None) as batch_op:
+        batch_op.create_index(batch_op.f('ix_process_model_cycle_process_model_identifier'), ['process_model_identifier'], unique=False)
+
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table('process_model_cycle', schema=None) as batch_op:
+        batch_op.drop_index(batch_op.f('ix_process_model_cycle_process_model_identifier'))
+
+    op.drop_table('process_model_cycle')
+    # ### end Alembic commands ###
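Note (not part of the commit): the migration above can be applied and rolled back with Alembic's Python API, assuming a standard alembic.ini pointing at the backend database; spiffworkflow-backend may instead drive this through its own migration tooling (for example Flask-Migrate), so treat this as a sketch only.

from alembic import command
from alembic.config import Config

config = Config("alembic.ini")             # assumed config location, not from the commit
command.upgrade(config, "e4b6bbf83a3e")    # creates process_model_cycle and its index
command.downgrade(config, "6aa02463da9c")  # drops the index and the table again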
@@ -76,5 +76,8 @@ from spiffworkflow_backend.models.process_instance_queue import (
 from spiffworkflow_backend.models.active_user import (
     ActiveUserModel,
 )  # noqa: F401
+from spiffworkflow_backend.models.process_model_cycle import (
+    ProcessModelCycleModel,
+)  # noqa: F401
 
 add_listeners()
@@ -0,0 +1,19 @@
+from dataclasses import dataclass
+
+from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
+from spiffworkflow_backend.models.db import db
+
+
+@dataclass
+class ProcessModelCycleModel(SpiffworkflowBaseDBModel):
+    """ProcessInstanceQueueModel."""
+
+    __tablename__ = "process_model_cycle"
+
+    id: int = db.Column(db.Integer, primary_key=True)
+    process_model_identifier: str = db.Column(db.String(255), nullable=False, index=True)
+    cycle_count: int = db.Column(db.Integer)
+    duration_in_seconds: int = db.Column(db.Integer)
+    current_cycle: int = db.Column(db.Integer)
+    updated_at_in_seconds: int = db.Column(db.Integer)
+    created_at_in_seconds: int = db.Column(db.Integer)
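Note (illustration only, not from the commit): a sketch of how a process_model_cycle row is written and read back with the model above, mirroring register_process_model_cycles and schedule_next_process_model_cycle later in this diff. It assumes a Flask application context, and the process model identifier used here is hypothetical.

from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.process_model_cycle import ProcessModelCycleModel

cycle = ProcessModelCycleModel(
    process_model_identifier="examples/cycle-timer",  # hypothetical identifier
    cycle_count=3,            # how many times the model should be restarted
    duration_in_seconds=30,   # seconds between cycles
    current_cycle=0,
)
db.session.add(cycle)
db.session.commit()

found = ProcessModelCycleModel.query.filter(
    ProcessModelCycleModel.process_model_identifier == "examples/cycle-timer"
).first()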
@@ -1,4 +1,5 @@
 from dataclasses import dataclass
+from typing import Any
 
 from marshmallow import Schema
 from sqlalchemy import ForeignKey
@@ -20,6 +21,15 @@ class SecretModel(SpiffworkflowBaseDBModel):
     updated_at_in_seconds: int = db.Column(db.Integer)
     created_at_in_seconds: int = db.Column(db.Integer)
 
+    # value is not included in the serialized output because it is sensitive
+    @property
+    def serialized(self) -> dict[str, Any]:
+        return {
+            "id": self.id,
+            "key": self.key,
+            "user_id": self.user_id,
+        }
+
 
 class SecretModelSchema(Schema):
     """SecretModelSchema."""
@@ -16,7 +16,12 @@ from spiffworkflow_backend.services.user_service import UserService
 def secret_show(key: str) -> Response:
     """Secret_show."""
     secret = SecretService.get_secret(key)
-    return make_response(jsonify(secret), 200)
+
+    # normal serialization does not include the secret value, but this is the one endpoint where we want to return the goods
+    secret_as_dict = secret.serialized
+    secret_as_dict["value"] = SecretService._decrypt(secret.value)
+
+    return make_response(secret_as_dict, 200)
 
 
 def secret_list(
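Note (illustration only): with the change above, GET /v1.0/secrets/<key> is the one endpoint that returns the decrypted value. The path comes from the tests later in this diff; the base URL and bearer-token header below are assumptions, not part of the commit.

import requests

response = requests.get(
    "http://localhost:7000/v1.0/secrets/my_secret_key",  # assumed local backend URL and key
    headers={"Authorization": "Bearer <access_token>"},   # assumed auth scheme
    timeout=10,
)
secret = response.json()
print(secret["key"], secret["value"])  # "value" is decrypted here, unlike the list endpoint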
@@ -1,4 +1,3 @@
-
 from flask import current_app
 from flask_mail import Message  # type: ignore
 
@@ -1,4 +1,3 @@
-
 from spiffworkflow_backend.models.db import db
 from spiffworkflow_backend.models.message_instance import MessageInstanceModel
 from spiffworkflow_backend.models.message_instance import MessageStatuses
@@ -21,6 +21,7 @@ from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
 from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
 from spiffworkflow_backend.models.process_instance_file_data import ProcessInstanceFileDataModel
 from spiffworkflow_backend.models.process_model import ProcessModelInfo
+from spiffworkflow_backend.models.process_model_cycle import ProcessModelCycleModel
 from spiffworkflow_backend.models.task import Task
 from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.services.authorization_service import AuthorizationService
@@ -33,6 +34,7 @@ from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceQueueService
 from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceQueueService
 from spiffworkflow_backend.services.process_model_service import ProcessModelService
 from spiffworkflow_backend.services.workflow_service import WorkflowService
+from spiffworkflow_backend.specs.start_event import StartConfiguration
 
 
 class ProcessInstanceService:
@@ -42,22 +44,27 @@ class ProcessInstanceService:
     TASK_STATE_LOCKED = "locked"
 
     @staticmethod
-    def calculate_start_delay_in_seconds(process_instance_model: ProcessInstanceModel) -> int:
+    def next_start_event_configuration(process_instance_model: ProcessInstanceModel) -> StartConfiguration:
         try:
             processor = ProcessInstanceProcessor(process_instance_model)
-            delay_in_seconds = WorkflowService.calculate_run_at_delay_in_seconds(
+            start_configuration = WorkflowService.next_start_event_configuration(
                 processor.bpmn_process_instance, datetime.now(timezone.utc)
             )
         except Exception:
-            delay_in_seconds = 0
-        return delay_in_seconds
+            start_configuration = None
+
+        if start_configuration is None:
+            start_configuration = (0, 0, 0)
+
+        return start_configuration
 
     @classmethod
     def create_process_instance(
         cls,
         process_model: ProcessModelInfo,
         user: UserModel,
-    ) -> ProcessInstanceModel:
+        start_configuration: StartConfiguration | None = None,
+    ) -> tuple[ProcessInstanceModel, StartConfiguration]:
         """Get_process_instance_from_spec."""
         db.session.commit()
         try:
@@ -75,10 +82,13 @@ class ProcessInstanceService:
         )
         db.session.add(process_instance_model)
         db.session.commit()
-        delay_in_seconds = cls.calculate_start_delay_in_seconds(process_instance_model)
+
+        if start_configuration is None:
+            start_configuration = cls.next_start_event_configuration(process_instance_model)
+        _, delay_in_seconds, _ = start_configuration
         run_at_in_seconds = round(time.time()) + delay_in_seconds
         ProcessInstanceQueueService.enqueue_new_process_instance(process_instance_model, run_at_in_seconds)
-        return process_instance_model
+        return (process_instance_model, start_configuration)
 
     @classmethod
     def create_process_instance_from_process_model_identifier(
@@ -88,7 +98,52 @@ class ProcessInstanceService:
     ) -> ProcessInstanceModel:
         """Create_process_instance_from_process_model_identifier."""
         process_model = ProcessModelService.get_process_model(process_model_identifier)
-        return cls.create_process_instance(process_model, user)
+        process_instance_model, (cycle_count, _, duration_in_seconds) = cls.create_process_instance(
+            process_model, user
+        )
+        cls.register_process_model_cycles(process_model_identifier, cycle_count, duration_in_seconds)
+        return process_instance_model
+
+    @classmethod
+    def register_process_model_cycles(
+        cls, process_model_identifier: str, cycle_count: int, duration_in_seconds: int
+    ) -> None:
+        # clean up old cycle record if it exists. event if the given cycle_count is 0 the previous version
+        # of the model could have included a cycle timer start event
+        cycles = ProcessModelCycleModel.query.filter(
+            ProcessModelCycleModel.process_model_identifier == process_model_identifier,
+        ).all()
+
+        for cycle in cycles:
+            db.session.delete(cycle)
+
+        if cycle_count != 0:
+            cycle = ProcessModelCycleModel(
+                process_model_identifier=process_model_identifier,
+                cycle_count=cycle_count,
+                duration_in_seconds=duration_in_seconds,
+                current_cycle=0,
+            )
+            db.session.add(cycle)
+
+        db.session.commit()
+
+    @classmethod
+    def schedule_next_process_model_cycle(cls, process_instance_model: ProcessInstanceModel) -> None:
+        cycle = ProcessModelCycleModel.query.filter(
+            ProcessModelCycleModel.process_model_identifier == process_instance_model.process_model_identifier
+        ).first()
+
+        if cycle is None or cycle.cycle_count == 0:
+            return
+
+        if cycle.cycle_count == -1 or cycle.current_cycle < cycle.cycle_count:
+            process_model = ProcessModelService.get_process_model(process_instance_model.process_model_identifier)
+            start_configuration = (cycle.cycle_count, cycle.duration_in_seconds, cycle.duration_in_seconds)
+            cls.create_process_instance(process_model, process_instance_model.process_initiator, start_configuration)
+            cycle.current_cycle += 1
+            db.session.add(cycle)
+            db.session.commit()
 
     @classmethod
     def waiting_event_can_be_skipped(cls, waiting_event: dict[str, Any], now_in_utc: datetime) -> bool:
@@ -155,6 +210,8 @@ class ProcessInstanceService:
                 cls.run_process_instance_with_processor(
                     process_instance, status_value=status_value, execution_strategy_name=execution_strategy_name
                 )
+                if process_instance.status == "complete":
+                    cls.schedule_next_process_model_cycle(process_instance)
             except ProcessInstanceIsAlreadyLockedError:
                 continue
             except Exception as e:
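Note (illustration only): the repeat rule encoded in schedule_next_process_model_cycle above, pulled out as a small function. A cycle_count of -1 appears to mean "repeat indefinitely"; otherwise new instances are created until current_cycle reaches cycle_count.

def should_schedule_another_cycle(cycle_count: int, current_cycle: int) -> bool:
    # mirrors the checks in schedule_next_process_model_cycle
    if cycle_count == 0:
        return False
    return cycle_count == -1 or current_cycle < cycle_count


assert should_schedule_another_cycle(3, 2) is True
assert should_schedule_another_cycle(3, 3) is False
assert should_schedule_another_cycle(-1, 100) is True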
@@ -1,4 +1,3 @@
-
 from flask import current_app
 from spiffworkflow_backend.exceptions.api_error import ApiError
 from spiffworkflow_backend.models.db import db
@@ -3,6 +3,7 @@ from datetime import datetime
 from SpiffWorkflow.bpmn.workflow import BpmnWorkflow  # type: ignore
 from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
 from SpiffWorkflow.task import TaskState
+from spiffworkflow_backend.specs.start_event import StartConfiguration
 from spiffworkflow_backend.specs.start_event import StartEvent
 
 
@@ -14,24 +15,13 @@ class WorkflowService:
         return [t for t in workflow.get_tasks(TaskState.FUTURE) if isinstance(t.task_spec, StartEvent)]
 
     @classmethod
-    def next_start_event_delay_in_seconds(cls, workflow: BpmnWorkflow, now_in_utc: datetime) -> int:
+    def next_start_event_configuration(cls, workflow: BpmnWorkflow, now_in_utc: datetime) -> StartConfiguration | None:
         start_events = cls.future_start_events(workflow)
-        start_delays: list[int] = []
-        for start_event in start_events:
-            start_delay = start_event.task_spec.start_delay_in_seconds(start_event, now_in_utc)
-            start_delays.append(start_delay)
-        start_delays.sort()
-        return start_delays[0] if len(start_delays) > 0 else 0
-
-    @classmethod
-    def calculate_run_at_delay_in_seconds(cls, workflow: BpmnWorkflow, now_in_utc: datetime) -> int:
-        # TODO: for now we are using the first start time because I am not sure how multiple
-        # start events should work. I think the right answer is to take the earliest start
-        # time and have later start events stay FUTURE/WAITING?, then we need to be able
-        # to respect the other start events when enqueue'ing.
-        #
-        # TODO: this method should also expand to include other FUTURE/WAITING timers when
-        # enqueue'ing so that we don't have to check timers every 10 or whatever seconds
-        # right now we assume that this is being called to create a process
-
-        return cls.next_start_event_delay_in_seconds(workflow, now_in_utc)
+        configurations = list(
+            map(
+                lambda start_event: start_event.task_spec.configuration(start_event, now_in_utc),  # type: ignore
+                start_events,
+            )
+        )
+        configurations.sort(key=lambda configuration: configuration[1])  # type: ignore
+        return configurations[0] if len(configurations) > 0 else None
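Note (illustration only): next_start_event_configuration above sorts the collected configurations by their second element, the start delay in seconds, so the earliest-starting start event wins.

configurations = [(0, 300, 0), (3, 30, 30), (0, 0, 0)]
configurations.sort(key=lambda configuration: configuration[1])
assert configurations[0] == (0, 0, 0)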
@@ -13,8 +13,11 @@ from SpiffWorkflow.bpmn.specs.event_definitions import TimerEventDefinition
 from SpiffWorkflow.spiff.parser.event_parsers import SpiffStartEventParser  # type: ignore
 from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
 
+StartConfiguration = tuple[int, int, int]
+
 # TODO: cylce timers and repeat counts?
 
+
 class StartEvent(DefaultStartEvent):  # type: ignore
     def __init__(self, wf_spec, bpmn_id, event_definition, **kwargs):  # type: ignore
         if isinstance(event_definition, TimerEventDefinition):
@@ -33,27 +36,36 @@ class StartEvent(DefaultStartEvent):  # type: ignore
     def register_parser_class(parser_config: dict[str, Any]) -> None:
         parser_config[full_tag("startEvent")] = (SpiffStartEventParser, StartEvent)
 
-    def start_delay_in_seconds(self, my_task: SpiffTask, now_in_utc: datetime) -> int:
-        script_engine = my_task.workflow.script_engine
-        evaluated_expression = None
-        parsed_duration = None
-
-        if isinstance(self.timer_definition, TimerEventDefinition) and script_engine is not None:
-            evaluated_expression = script_engine.evaluate(my_task, self.timer_definition.expression)
+    def configuration(self, my_task: SpiffTask, now_in_utc: datetime) -> StartConfiguration:
+        evaluated_expression = self.evaluated_timer_expression(my_task)
+        cycles = 0
+        start_delay_in_seconds = 0
+        duration = 0
 
         if evaluated_expression is not None:
             if isinstance(self.timer_definition, TimeDateEventDefinition):
                 parsed_duration = TimerEventDefinition.parse_time_or_duration(evaluated_expression)
                 time_delta = parsed_duration - now_in_utc
-                return time_delta.seconds  # type: ignore
+                start_delay_in_seconds = time_delta.seconds
             elif isinstance(self.timer_definition, DurationTimerEventDefinition):
                 parsed_duration = TimerEventDefinition.parse_iso_duration(evaluated_expression)
                 time_delta = TimerEventDefinition.get_timedelta_from_start(parsed_duration, now_in_utc)
-                return time_delta.seconds  # type: ignore
+                start_delay_in_seconds = time_delta.seconds
             elif isinstance(self.timer_definition, CycleTimerEventDefinition):
-                return 0
+                cycles, start, cycle_duration = TimerEventDefinition.parse_iso_recurring_interval(evaluated_expression)
+                time_delta = start - now_in_utc + cycle_duration
+                start_delay_in_seconds = time_delta.seconds
+                duration = cycle_duration.seconds
 
-        return 0
+        return (cycles, start_delay_in_seconds, duration)
+
+    def evaluated_timer_expression(self, my_task: SpiffTask) -> Any:
+        script_engine = my_task.workflow.script_engine
+        evaluated_expression = None
+
+        if isinstance(self.timer_definition, TimerEventDefinition) and script_engine is not None:
+            evaluated_expression = script_engine.evaluate(my_task, self.timer_definition.expression)
+        return evaluated_expression
 
 
 class StartEventConverter(EventConverter):  # type: ignore
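Note (illustration only): StartConfiguration is the tuple (cycles, start_delay_in_seconds, duration_in_seconds) returned by StartEvent.configuration above. For a hypothetical cycle timer start event whose expression evaluates to the ISO 8601 recurrence "R3/PT30S" (three repeats, 30 seconds apart), the configuration would be roughly the tuple below, consumed the way create_process_instance consumes it.

import time

start_configuration: tuple[int, int, int] = (3, 30, 30)  # (cycles, start delay, cycle duration)
cycles, delay_in_seconds, duration_in_seconds = start_configuration
run_at_in_seconds = round(time.time()) + delay_in_seconds  # as create_process_instance does with it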
@@ -6,11 +6,9 @@ from flask.testing import FlaskClient
 from spiffworkflow_backend.exceptions.api_error import ApiError
 from spiffworkflow_backend.models.process_model import ProcessModelInfo
 from spiffworkflow_backend.models.secret_model import SecretModel
-from spiffworkflow_backend.models.secret_model import SecretModelSchema
 from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.services.process_model_service import ProcessModelService
 from spiffworkflow_backend.services.secret_service import SecretService
-from werkzeug.test import TestResponse  # type: ignore
 
 from tests.spiffworkflow_backend.helpers.base_test import BaseTest
 
@@ -163,115 +161,3 @@ class TestSecretService(SecretServiceTestHelpers):
         with pytest.raises(ApiError) as ae:
             SecretService.delete_secret(self.test_key + "x", with_super_admin_user.id)
         assert "Resource does not exist" in ae.value.message
-
-
-class TestSecretServiceApi(SecretServiceTestHelpers):
-    """TestSecretServiceApi."""
-
-    def test_add_secret(
-        self,
-        app: Flask,
-        client: FlaskClient,
-        with_db_and_bpmn_file_cleanup: None,
-        with_super_admin_user: UserModel,
-    ) -> None:
-        """Test_add_secret."""
-        secret_model = SecretModel(
-            key=self.test_key,
-            value=self.test_value,
-            user_id=with_super_admin_user.id,
-        )
-        data = json.dumps(SecretModelSchema().dump(secret_model))
-        response: TestResponse = client.post(
-            "/v1.0/secrets",
-            headers=self.logged_in_headers(with_super_admin_user),
-            content_type="application/json",
-            data=data,
-        )
-        assert response.json
-        secret: dict = response.json
-        for key in ["key", "value", "user_id"]:
-            assert key in secret.keys()
-        assert secret["key"] == self.test_key
-        assert SecretService._decrypt(secret["value"]) == self.test_value
-        assert secret["user_id"] == with_super_admin_user.id
-
-    def test_get_secret(
-        self,
-        app: Flask,
-        client: FlaskClient,
-        with_db_and_bpmn_file_cleanup: None,
-        with_super_admin_user: UserModel,
-    ) -> None:
-        """Test get secret."""
-        self.add_test_secret(with_super_admin_user)
-        secret_response = client.get(
-            f"/v1.0/secrets/{self.test_key}",
-            headers=self.logged_in_headers(with_super_admin_user),
-        )
-        assert secret_response
-        assert secret_response.status_code == 200
-        assert secret_response.json
-        assert SecretService._decrypt(secret_response.json["value"]) == self.test_value
-
-    def test_update_secret(
-        self,
-        app: Flask,
-        client: FlaskClient,
-        with_db_and_bpmn_file_cleanup: None,
-        with_super_admin_user: UserModel,
-    ) -> None:
-        """Test_update_secret."""
-        self.add_test_secret(with_super_admin_user)
-        secret: SecretModel | None = SecretService.get_secret(self.test_key)
-        assert secret
-        assert SecretService._decrypt(secret.value) == self.test_value
-        secret_model = SecretModel(
-            key=self.test_key,
-            value="new_secret_value",
-            user_id=with_super_admin_user.id,
-        )
-        response = client.put(
-            f"/v1.0/secrets/{self.test_key}",
-            headers=self.logged_in_headers(with_super_admin_user),
-            content_type="application/json",
-            data=json.dumps(SecretModelSchema().dump(secret_model)),
-        )
-        assert response.status_code == 200
-
-        secret_model = SecretModel.query.filter(SecretModel.key == self.test_key).first()
-        assert SecretService._decrypt(secret_model.value) == "new_secret_value"
-
-    def test_delete_secret(
-        self,
-        app: Flask,
-        client: FlaskClient,
-        with_db_and_bpmn_file_cleanup: None,
-        with_super_admin_user: UserModel,
-    ) -> None:
-        """Test delete secret."""
-        self.add_test_secret(with_super_admin_user)
-        secret = SecretService.get_secret(self.test_key)
-        assert secret
-        assert SecretService._decrypt(secret.value) == self.test_value
-        secret_response = client.delete(
-            f"/v1.0/secrets/{self.test_key}",
-            headers=self.logged_in_headers(with_super_admin_user),
-        )
-        assert secret_response.status_code == 200
-        with pytest.raises(ApiError):
-            secret = SecretService.get_secret(self.test_key)
-
-    def test_delete_secret_bad_key(
-        self,
-        app: Flask,
-        client: FlaskClient,
-        with_db_and_bpmn_file_cleanup: None,
-        with_super_admin_user: UserModel,
-    ) -> None:
-        """Test delete secret."""
-        secret_response = client.delete(
-            "/v1.0/secrets/bad_secret_key",
-            headers=self.logged_in_headers(with_super_admin_user),
-        )
-        assert secret_response.status_code == 404
@@ -0,0 +1,139 @@
+import json
+
+import pytest
+from flask.app import Flask
+from flask.testing import FlaskClient
+from spiffworkflow_backend.exceptions.api_error import ApiError
+from spiffworkflow_backend.models.secret_model import SecretModel
+from spiffworkflow_backend.models.secret_model import SecretModelSchema
+from spiffworkflow_backend.models.user import UserModel
+from spiffworkflow_backend.services.secret_service import SecretService
+
+from tests.spiffworkflow_backend.integration.test_secret_service import SecretServiceTestHelpers
+
+
+class TestSecretsController(SecretServiceTestHelpers):
+    def test_add_secret(
+        self,
+        app: Flask,
+        client: FlaskClient,
+        with_db_and_bpmn_file_cleanup: None,
+        with_super_admin_user: UserModel,
+    ) -> None:
+        """Test_add_secret."""
+        secret_model = SecretModel(
+            key=self.test_key,
+            value=self.test_value,
+            user_id=with_super_admin_user.id,
+        )
+        data = json.dumps(SecretModelSchema().dump(secret_model))
+        response = client.post(
+            "/v1.0/secrets",
+            headers=self.logged_in_headers(with_super_admin_user),
+            content_type="application/json",
+            data=data,
+        )
+        assert response.json
+        secret: dict = response.json
+        for key in ["key", "value", "user_id"]:
+            assert key in secret.keys()
+        assert secret["key"] == self.test_key
+        assert SecretService._decrypt(secret["value"]) == self.test_value
+        assert secret["user_id"] == with_super_admin_user.id
+
+    def test_get_secret(
+        self,
+        app: Flask,
+        client: FlaskClient,
+        with_db_and_bpmn_file_cleanup: None,
+        with_super_admin_user: UserModel,
+    ) -> None:
+        """Test get secret."""
+        self.add_test_secret(with_super_admin_user)
+        secret_response = client.get(
+            f"/v1.0/secrets/{self.test_key}",
+            headers=self.logged_in_headers(with_super_admin_user),
+        )
+        assert secret_response
+        assert secret_response.status_code == 200
+        assert secret_response.json
+        assert SecretService._decrypt(secret_response.json["value"]) == self.test_value
+
+    def test_update_secret(
+        self,
+        app: Flask,
+        client: FlaskClient,
+        with_db_and_bpmn_file_cleanup: None,
+        with_super_admin_user: UserModel,
+    ) -> None:
+        """Test_update_secret."""
+        self.add_test_secret(with_super_admin_user)
+        secret: SecretModel | None = SecretService.get_secret(self.test_key)
+        assert secret
+        assert SecretService._decrypt(secret.value) == self.test_value
+        secret_model = SecretModel(
+            key=self.test_key,
+            value="new_secret_value",
+            user_id=with_super_admin_user.id,
+        )
+        response = client.put(
+            f"/v1.0/secrets/{self.test_key}",
+            headers=self.logged_in_headers(with_super_admin_user),
+            content_type="application/json",
+            data=json.dumps(SecretModelSchema().dump(secret_model)),
+        )
+        assert response.status_code == 200
+
+        secret_model = SecretModel.query.filter(SecretModel.key == self.test_key).first()
+        assert SecretService._decrypt(secret_model.value) == "new_secret_value"
+
+    def test_delete_secret(
+        self,
+        app: Flask,
+        client: FlaskClient,
+        with_db_and_bpmn_file_cleanup: None,
+        with_super_admin_user: UserModel,
+    ) -> None:
+        """Test delete secret."""
+        self.add_test_secret(with_super_admin_user)
+        secret = SecretService.get_secret(self.test_key)
+        assert secret
+        assert SecretService._decrypt(secret.value) == self.test_value
+        secret_response = client.delete(
+            f"/v1.0/secrets/{self.test_key}",
+            headers=self.logged_in_headers(with_super_admin_user),
+        )
+        assert secret_response.status_code == 200
+        with pytest.raises(ApiError):
+            secret = SecretService.get_secret(self.test_key)
+
+    def test_delete_secret_bad_key(
+        self,
+        app: Flask,
+        client: FlaskClient,
+        with_db_and_bpmn_file_cleanup: None,
+        with_super_admin_user: UserModel,
+    ) -> None:
+        """Test delete secret."""
+        secret_response = client.delete(
+            "/v1.0/secrets/bad_secret_key",
+            headers=self.logged_in_headers(with_super_admin_user),
+        )
+        assert secret_response.status_code == 404
+
+    def test_secret_list(
+        self,
+        app: Flask,
+        client: FlaskClient,
+        with_db_and_bpmn_file_cleanup: None,
+        with_super_admin_user: UserModel,
+    ) -> None:
+        self.add_test_secret(with_super_admin_user)
+        secret_response = client.get(
+            "/v1.0/secrets",
+            headers=self.logged_in_headers(with_super_admin_user),
+        )
+        assert secret_response.status_code == 200
+        first_secret_in_results = secret_response.json["results"][0]
+        assert first_secret_in_results["key"] == self.test_key
+        assert "value" not in first_secret_in_results
@@ -84,7 +84,7 @@ class TestWorkflowService(BaseTest):
         """,
             "no_tasks",
         )
-        delay = WorkflowService.calculate_run_at_delay_in_seconds(workflow, now_in_utc)
+        _, delay, _ = WorkflowService.next_start_event_configuration(workflow, now_in_utc)  # type: ignore
         assert delay == 0
 
     def test_run_at_delay_is_30_for_30_second_duration_start_timer_event(self, now_in_utc: datetime) -> None:
@@ -105,7 +105,7 @@ class TestWorkflowService(BaseTest):
         """,
             "Process_aldvgey",
         )
-        delay = WorkflowService.calculate_run_at_delay_in_seconds(workflow, now_in_utc)
+        _, delay, _ = WorkflowService.next_start_event_configuration(workflow, now_in_utc)  # type: ignore
         assert delay == 30
 
     def test_run_at_delay_is_300_if_5_mins_before_date_start_timer_event(
@@ -128,5 +128,7 @@ class TestWorkflowService(BaseTest):
         """,
             "Process_aldvgey",
         )
-        delay = WorkflowService.calculate_run_at_delay_in_seconds(workflow, example_start_datetime_minus_5_mins_in_utc)
+        _, delay, _ = WorkflowService.next_start_event_configuration(
+            workflow, example_start_datetime_minus_5_mins_in_utc
+        )  # type: ignore
         assert delay == 300
@@ -40,6 +40,12 @@ export default function ActiveUsers() {
     // eslint-disable-next-line react-hooks/exhaustive-deps
   }, []); // it is critical to only run this once.
 
+  // activeUsers is supposed to be an array, but it is based on the response body
+  // from a network call, so who knows what might happen. Be safe.
+  if (!activeUsers.map) {
+    return null;
+  }
+
   const au = activeUsers.map((activeUser: User) => {
     return (
       <div
@@ -10,8 +10,8 @@ import { Button } from '@carbon/react';
 
 type OwnProps = {
   title: string;
-  children: React.ReactNode;
-  onClose: (..._args: any[]) => any;
+  children?: React.ReactNode;
+  onClose: Function;
   type?: string;
 };
 
@@ -1,9 +1,10 @@
 import { useEffect, useState } from 'react';
 import { useParams, useNavigate } from 'react-router-dom';
 // @ts-ignore
-import { Stack, Table, Button } from '@carbon/react';
+import { Stack, Table, Button, TextInput } from '@carbon/react';
 import HttpService from '../services/HttpService';
 import { Secret } from '../interfaces';
+import { Notification } from '../components/Notification';
 import ButtonWithConfirmation from '../components/ButtonWithConfirmation';
 
 export default function SecretShow() {
|
|||||||
const params = useParams();
|
const params = useParams();
|
||||||
|
|
||||||
const [secret, setSecret] = useState<Secret | null>(null);
|
const [secret, setSecret] = useState<Secret | null>(null);
|
||||||
const [secretValue, setSecretValue] = useState(secret?.value);
|
const [displaySecretValue, setDisplaySecretValue] = useState<boolean>(false);
|
||||||
|
const [showSuccessNotification, setShowSuccessNotification] =
|
||||||
|
useState<boolean>(false);
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
HttpService.makeCallToBackend({
|
HttpService.makeCallToBackend({
|
||||||
@ -22,22 +25,21 @@ export default function SecretShow() {
|
|||||||
|
|
||||||
const handleSecretValueChange = (event: any) => {
|
const handleSecretValueChange = (event: any) => {
|
||||||
if (secret) {
|
if (secret) {
|
||||||
setSecretValue(event.target.value);
|
const newSecret = { ...secret, value: event.target.value };
|
||||||
|
setSecret(newSecret);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
const updateSecretValue = () => {
|
const updateSecretValue = () => {
|
||||||
if (secret && secretValue) {
|
if (secret) {
|
||||||
secret.value = secretValue;
|
|
||||||
HttpService.makeCallToBackend({
|
HttpService.makeCallToBackend({
|
||||||
path: `/secrets/${secret.key}`,
|
path: `/secrets/${secret.key}`,
|
||||||
successCallback: () => {
|
successCallback: () => {
|
||||||
setSecret(secret);
|
setShowSuccessNotification(true);
|
||||||
},
|
},
|
||||||
httpMethod: 'PUT',
|
httpMethod: 'PUT',
|
||||||
postBody: {
|
postBody: {
|
||||||
value: secretValue,
|
value: secret.value,
|
||||||
creator_user_id: secret.creator_user_id,
|
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@ -58,9 +60,17 @@ export default function SecretShow() {
|
|||||||
});
|
});
|
||||||
};
|
};
|
||||||
|
|
||||||
|
const successNotificationComponent = (
|
||||||
|
<Notification
|
||||||
|
title="Secret updated"
|
||||||
|
onClose={() => setShowSuccessNotification(false)}
|
||||||
|
/>
|
||||||
|
);
|
||||||
|
|
||||||
if (secret) {
|
if (secret) {
|
||||||
return (
|
return (
|
||||||
<>
|
<>
|
||||||
|
{showSuccessNotification && successNotificationComponent}
|
||||||
<h1>Secret Key: {secret.key}</h1>
|
<h1>Secret Key: {secret.key}</h1>
|
||||||
<Stack orientation="horizontal" gap={3}>
|
<Stack orientation="horizontal" gap={3}>
|
||||||
<ButtonWithConfirmation
|
<ButtonWithConfirmation
|
||||||
@ -68,8 +78,14 @@ export default function SecretShow() {
|
|||||||
onConfirmation={deleteSecret}
|
onConfirmation={deleteSecret}
|
||||||
buttonLabel="Delete"
|
buttonLabel="Delete"
|
||||||
/>
|
/>
|
||||||
<Button variant="warning" onClick={updateSecretValue}>
|
<Button
|
||||||
Update Value
|
disabled={displaySecretValue}
|
||||||
|
variant="warning"
|
||||||
|
onClick={() => {
|
||||||
|
setDisplaySecretValue(true);
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
Retrieve secret value
|
||||||
</Button>
|
</Button>
|
||||||
</Stack>
|
</Stack>
|
||||||
<div>
|
<div>
|
||||||
@ -77,21 +93,36 @@ export default function SecretShow() {
|
|||||||
<thead>
|
<thead>
|
||||||
<tr>
|
<tr>
|
||||||
<th>Key</th>
|
<th>Key</th>
|
||||||
<th>Value</th>
|
{displaySecretValue && (
|
||||||
|
<>
|
||||||
|
<th>Value</th>
|
||||||
|
<th>Actions</th>
|
||||||
|
</>
|
||||||
|
)}
|
||||||
</tr>
|
</tr>
|
||||||
</thead>
|
</thead>
|
||||||
<tbody>
|
<tbody>
|
||||||
<tr>
|
<tr>
|
||||||
<td>{params.key}</td>
|
<td>{params.key}</td>
|
||||||
<td>
|
{displaySecretValue && (
|
||||||
<input
|
<>
|
||||||
id="secret_value"
|
<td>
|
||||||
name="secret_value"
|
<TextInput
|
||||||
type="text"
|
id="secret_value"
|
||||||
value={secretValue || secret.value}
|
name="secret_value"
|
||||||
onChange={handleSecretValueChange}
|
value={secret.value}
|
||||||
/>
|
onChange={handleSecretValueChange}
|
||||||
</td>
|
/>
|
||||||
|
</td>
|
||||||
|
<td>
|
||||||
|
{displaySecretValue && (
|
||||||
|
<Button variant="warning" onClick={updateSecretValue}>
|
||||||
|
Update Value
|
||||||
|
</Button>
|
||||||
|
)}
|
||||||
|
</td>
|
||||||
|
</>
|
||||||
|
)}
|
||||||
</tr>
|
</tr>
|
||||||
</tbody>
|
</tbody>
|
||||||
</Table>
|
</Table>
|
||||||
|