usama9500 2023-05-26 19:12:12 +05:00
commit bdef85a209
22 changed files with 462 additions and 62 deletions

.gitignore (vendored, 3 lines changed)

@@ -5,4 +5,5 @@
.dccache
version_info.json
.coverage*
-UNKNOWN.egg-info/
+UNKNOWN.egg-info/
+process_models/

bin/run_editor (new executable file, 28 lines)

@@ -0,0 +1,28 @@
#!/usr/bin/env bash
function error_handler() {
>&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail
if [[ -z "${1:-}" ]]; then
>&2 echo "usage: $(basename "$0") [SPIFF_EDITOR_BPMN_SPEC_DIR]"
exit 1
fi
if [[ ! -d "$1" ]]; then
>&2 echo "ERROR: the first argument must be a directory."
exit 1
fi
SPIFF_EDITOR_BPMN_SPEC_DIR=$1 \
docker compose -f editor.docker-compose.yml up -d
echo ""
echo "Spiff Editor is ready."
echo ""
echo "Please open ${SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND:-http://localhost:${SPIFFWORKFLOW_FRONTEND_PORT:-8001}}"

bin/stop_editor (new executable file, 3 lines)

@@ -0,0 +1,3 @@
#!/usr/bin/env bash
docker compose -f editor.docker-compose.yml down

bin/update_editor (new executable file, 3 lines)

@@ -0,0 +1,3 @@
#!/usr/bin/env bash
docker compose -f editor.docker-compose.yml pull

editor.docker-compose.yml (new file, 66 lines)

@@ -0,0 +1,66 @@
services:
spiffworkflow-frontend:
container_name: spiffworkflow-frontend
image: ghcr.io/sartography/spiffworkflow-frontend:main-latest
depends_on:
spiffworkflow-backend:
condition: service_healthy
environment:
APPLICATION_ROOT: "/"
PORT0: "${SPIFFWORKFLOW_FRONTEND_PORT:-8001}"
ports:
- "${SPIFFWORKFLOW_FRONTEND_PORT:-8001}:${SPIFFWORKFLOW_FRONTEND_PORT:-8001}/tcp"
spiffworkflow-backend:
container_name: spiffworkflow-backend
image: ghcr.io/sartography/spiffworkflow-backend:main-latest
environment:
SPIFFWORKFLOW_BACKEND_APPLICATION_ROOT: "/"
SPIFFWORKFLOW_BACKEND_ENV: "local_development"
FLASK_DEBUG: "0"
FLASK_SESSION_SECRET_KEY: "${FLASK_SESSION_SECRET_KEY:-super_secret_key}"
      # WARNING: the frontend is a static site that assumes the backend runs at (frontend port - 1) on localhost.
SPIFFWORKFLOW_BACKEND_URL: "http://localhost:${SPIFF_BACKEND_PORT:-8000}"
SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR: "/app/process_models"
SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL: "http://spiffworkflow-connector:8004"
SPIFFWORKFLOW_BACKEND_DATABASE_TYPE: "sqlite"
SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA: "false"
SPIFFWORKFLOW_BACKEND_OPEN_ID_CLIENT_ID: "spiffworkflow-backend"
SPIFFWORKFLOW_BACKEND_OPEN_ID_CLIENT_SECRET_KEY: "my_open_id_secret_key"
SPIFFWORKFLOW_BACKEND_OPEN_ID_SERVER_URL: "http://localhost:${SPIFF_BACKEND_PORT:-8000}/openid"
SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME: "example.yml"
SPIFFWORKFLOW_BACKEND_PORT: "${SPIFF_BACKEND_PORT:-8000}"
SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER: "false"
SPIFFWORKFLOW_BACKEND_UPGRADE_DB: "true"
SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND: "http://localhost:${SPIFFWORKFLOW_FRONTEND_PORT:-8001}"
ports:
- "${SPIFF_BACKEND_PORT:-8000}:${SPIFF_BACKEND_PORT:-8000}/tcp"
volumes:
- "${SPIFF_EDITOR_BPMN_SPEC_DIR:-./process_models}:/app/process_models"
- ./log:/app/log
healthcheck:
test: "curl localhost:${SPIFF_BACKEND_PORT:-8000}/v1.0/status --fail"
interval: 10s
timeout: 5s
retries: 20
spiffworkflow-connector:
container_name: spiffworkflow-connector
image: ghcr.io/sartography/connector-proxy-demo:latest
environment:
FLASK_ENV: "${FLASK_ENV:-development}"
FLASK_DEBUG: "0"
FLASK_SESSION_SECRET_KEY: "${FLASK_SESSION_SECRET_KEY:-super_secret_key}"
CONNECTOR_PROXY_PORT: "${SPIFF_CONNECTOR_PORT:-8004}"
ports:
- "${SPIFF_CONNECTOR_PORT:-8004}:${SPIFF_CONNECTOR_PORT:-8004}/tcp"
healthcheck:
test: "curl localhost:${SPIFF_CONNECTOR_PORT:-8004}/liveness --fail"
interval: 10s
timeout: 5s
retries: 20
volumes:
spiffworkflow_backend:
driver: local
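A quick host-side way to verify the stack, mirroring the backend healthcheck above (a minimal Python sketch assuming the default port 8000; not part of this commit):

import urllib.request

# Poll the same endpoint the compose healthcheck curls: /v1.0/status on the backend port.
with urllib.request.urlopen("http://localhost:8000/v1.0/status") as response:
    assert response.status == 200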

View File

@@ -14,13 +14,13 @@ fi
# shellcheck disable=2016
mysql -uroot "$database" -e '
-select u.username user, g.identifier group
+select u.username username, g.identifier group_name
FROM `user` u
-JOIN `user_group_assignment` uga on uga.user_id = u.id
-JOIN `group` g on g.id = uga.group_id;
+JOIN `user_group_assignment` uga ON uga.user_id = u.id
+JOIN `group` g ON g.id = uga.group_id;

select pa.id, g.identifier group_identifier, pt.uri, permission from permission_assignment pa
-join principal p on p.id = pa.principal_id
-join `group` g on g.id = p.group_id
-join permission_target pt on pt.id = pa.permission_target_id;
+JOIN principal p ON p.id = pa.principal_id
+JOIN `group` g ON g.id = p.group_id
+JOIN permission_target pt ON pt.id = pa.permission_target_id;
'

View File

@@ -79,6 +79,10 @@ SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS = environ.get(
    "SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS"
)
+SPIFFWORKFLOW_BACKEND_AUTHENTICATION_DISABLED = (
+    environ.get("SPIFFWORKFLOW_BACKEND_AUTHENTICATION_DISABLED", default="false") == "true"
+)
# loggers to use is a comma-separated list of logger prefixes that will be converted to a list of strings
SPIFFWORKFLOW_BACKEND_LOGGERS_TO_USE = environ.get("SPIFFWORKFLOW_BACKEND_LOGGERS_TO_USE")
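Note that only the exact string "true" flips the new flag; a hedged standalone illustration (not project code):

from os import environ

environ["SPIFFWORKFLOW_BACKEND_AUTHENTICATION_DISABLED"] = "True"  # wrong case on purpose
disabled = environ.get("SPIFFWORKFLOW_BACKEND_AUTHENTICATION_DISABLED", default="false") == "true"
assert disabled is False  # only the lowercase string "true" disables authentication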

View File

@@ -28,12 +28,17 @@ SPIFFWORKFLOW_BACKEND_OPEN_ID_SERVER_URL = environ.get(
    ),
)
-SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND = (
-    f"https://{environment_identifier_for_this_config_file_only}.spiffworkflow.org"
+SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND = environ.get(
+    "SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND",
+    default=f"https://{environment_identifier_for_this_config_file_only}.spiffworkflow.org",
)
-SPIFFWORKFLOW_BACKEND_URL = f"https://api.{environment_identifier_for_this_config_file_only}.spiffworkflow.org"
-SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL = (
-    f"https://connector-proxy.{environment_identifier_for_this_config_file_only}.spiffworkflow.org"
+SPIFFWORKFLOW_BACKEND_URL = environ.get(
+    "SPIFFWORKFLOW_BACKEND_URL",
+    default=f"https://api.{environment_identifier_for_this_config_file_only}.spiffworkflow.org",
+)
+SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL = environ.get(
+    "SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL",
+    default=f"https://connector-proxy.{environment_identifier_for_this_config_file_only}.spiffworkflow.org",
)
SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL = environ.get(
    "SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL",

View File

@@ -15,13 +15,14 @@ if TYPE_CHECKING:
    from spiffworkflow_backend.models.user import UserModel  # noqa: F401

+SPIFF_NO_AUTH_ANONYMOUS_GROUP = "spiff_anonymous_group"

class GroupNotFoundError(Exception):
-    """GroupNotFoundError."""
+    pass

class GroupModel(SpiffworkflowBaseDBModel):
    """GroupModel."""

    __tablename__ = "group"
    __table_args__ = {"extend_existing": True}

View File

@@ -32,3 +32,5 @@ class PrincipalModel(SpiffworkflowBaseDBModel):
    user = relationship("UserModel", viewonly=True)
    group = relationship("GroupModel", viewonly=True)
+
+    permission_assignments = relationship("PermissionAssignmentModel", cascade="delete")  # type: ignore

View File

@@ -15,14 +15,15 @@ from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.group import GroupModel

+SPIFF_NO_AUTH_ANONYMOUS_USER = "spiff_anonymous_user"

class UserNotFoundError(Exception):
-    """UserNotFoundError."""
+    pass

@dataclass
class UserModel(SpiffworkflowBaseDBModel):
    """UserModel."""

    __tablename__ = "user"
    __table_args__ = (db.UniqueConstraint("service", "service_id", name="service_key"),)
@@ -47,9 +48,9 @@ class UserModel(SpiffworkflowBaseDBModel):
        secondary="user_group_assignment",
        overlaps="user_group_assignments,users",
    )
-    principal = relationship("PrincipalModel", uselist=False)  # type: ignore
+    principal = relationship("PrincipalModel", uselist=False, cascade="delete")  # type: ignore

-    def encode_auth_token(self) -> str:
+    def encode_auth_token(self, extra_payload: dict | None = None) -> str:
        """Generate the Auth Token.

        :return: string
@@ -59,12 +60,16 @@ class UserModel(SpiffworkflowBaseDBModel):
            raise KeyError("we need current_app.config to have a SECRET_KEY")

        # hours = float(app.config['TOKEN_AUTH_TTL_HOURS'])
-        payload = {
-            # 'exp': datetime.datetime.utcnow() + datetime.timedelta(hours=hours, minutes=0, seconds=0),
-            # 'iat': datetime.datetime.utcnow(),
+        base_payload = {
            "email": self.email,
            "preferred_username": self.username,
            "sub": f"service:{self.service}::service_id:{self.service_id}",
            "token_type": "internal",
        }
+        payload = base_payload
+        if extra_payload is not None:
+            payload = {**base_payload, **extra_payload}
        return jwt.encode(
            payload,
            secret_key,
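A round-trip sketch of how an extra_payload claim reaches token consumers (standalone PyJWT with a placeholder secret; not project code):

import jwt

secret_key = "placeholder_secret"  # stands in for current_app.config["SECRET_KEY"]
base_payload = {"email": "user@example.com", "token_type": "internal"}
token = jwt.encode({**base_payload, "authentication_disabled": True}, secret_key, algorithm="HS256")
# The frontend reads this claim after decoding the id token (see UserService.authenticationDisabled below).
assert jwt.decode(token, secret_key, algorithms=["HS256"])["authentication_disabled"] is True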

View File

@@ -6,7 +6,6 @@ import re
from typing import Any
from typing import Dict
from typing import Optional
-from typing import Union

import flask
import jwt
@@ -20,6 +19,10 @@ from werkzeug.wrappers import Response
from spiffworkflow_backend.exceptions.api_error import ApiError
from spiffworkflow_backend.helpers.api_version import V1_API_PATH_PREFIX
from spiffworkflow_backend.models.db import db
+from spiffworkflow_backend.models.group import GroupModel
+from spiffworkflow_backend.models.group import SPIFF_NO_AUTH_ANONYMOUS_GROUP
+from spiffworkflow_backend.models.user import SPIFF_NO_AUTH_ANONYMOUS_USER
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.authentication_service import AuthenticationService
from spiffworkflow_backend.services.authentication_service import (
@@ -27,6 +30,7 @@ from spiffworkflow_backend.services.authentication_service import (
)
from spiffworkflow_backend.services.authentication_service import TokenExpiredError
from spiffworkflow_backend.services.authorization_service import AuthorizationService
+from spiffworkflow_backend.services.group_service import GroupService
from spiffworkflow_backend.services.user_service import UserService
"""
@@ -36,9 +40,7 @@ from spiffworkflow_backend.services.user_service import UserService
# authorization_exclusion_list = ['status']

-def verify_token(
-    token: Optional[str] = None, force_run: Optional[bool] = False
-) -> Optional[Dict[str, Optional[Union[str, int]]]]:
+def verify_token(token: Optional[str] = None, force_run: Optional[bool] = False) -> None:
    """Verify the token for the user (if provided).

    If in production environment and token is not provided, gets user from the SSO headers and returns their token.
@@ -82,6 +84,22 @@
                current_app.logger.error(
                    f"Exception in verify_token getting user from decoded internal token. {e}"
                )
+            # if the user is the anonymous user and auth is enabled, make sure we clean up the anonymous user
+            if (
+                user_model
+                and not current_app.config.get("SPIFFWORKFLOW_BACKEND_AUTHENTICATION_DISABLED")
+                and user_model.username == SPIFF_NO_AUTH_ANONYMOUS_USER
+                and user_model.service_id == "spiff_anonymous_service_id"
+            ):
+                group_model = GroupModel.query.filter_by(identifier=SPIFF_NO_AUTH_ANONYMOUS_GROUP).first()
+                db.session.delete(group_model)
+                db.session.delete(user_model)
+                db.session.commit()
+                tld = current_app.config["THREAD_LOCAL_DATA"]
+                tld.user_has_logged_out = True
+                return None
        elif "iss" in decoded_token.keys():
            user_info = None
            try:
@@ -196,29 +214,22 @@ def set_new_access_token_in_cookie(
    return response

-def encode_auth_token(sub: str, token_type: Optional[str] = None) -> str:
-    """Generates the Auth Token.
-
-    :return: string
-    """
-    payload = {"sub": sub}
-    if token_type is None:
-        token_type = "internal"  # noqa: S105
-    payload["token_type"] = token_type
-    if "SECRET_KEY" in current_app.config:
-        secret_key = current_app.config.get("SECRET_KEY")
-    else:
-        current_app.logger.error("Missing SECRET_KEY in encode_auth_token")
-        raise ApiError(error_code="encode_error", message="Missing SECRET_KEY in encode_auth_token")
-    return jwt.encode(
-        payload,
-        str(secret_key),
-        algorithm="HS256",
-    )

def login(redirect_url: str = "/") -> Response:
    """Login."""
+    if current_app.config.get("SPIFFWORKFLOW_BACKEND_AUTHENTICATION_DISABLED"):
+        user = UserModel.query.filter_by(username=SPIFF_NO_AUTH_ANONYMOUS_USER).first()
+        if user is None:
+            user = UserService.create_user(
+                SPIFF_NO_AUTH_ANONYMOUS_USER, "spiff_anonymous_service", "spiff_anonymous_service_id"
+            )
+            GroupService.add_user_to_group_or_add_to_waiting(user.username, SPIFF_NO_AUTH_ANONYMOUS_GROUP)
+            AuthorizationService.add_permission_from_uri_or_macro(SPIFF_NO_AUTH_ANONYMOUS_GROUP, "all", "/*")
+        g.user = user
+        g.token = user.encode_auth_token({"authentication_disabled": True})
+        tld = current_app.config["THREAD_LOCAL_DATA"]
+        tld.new_access_token = g.token
+        tld.new_id_token = g.token
+        return redirect(redirect_url)
+
    state = AuthenticationService.generate_state(redirect_url)
    login_redirect_url = AuthenticationService().get_login_redirect_url(state.decode("UTF-8"))
    return redirect(login_redirect_url)

View File

@@ -2,9 +2,13 @@
from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser  # type: ignore
from SpiffWorkflow.spiff.parser.process import SpiffBpmnParser  # type: ignore

+from spiffworkflow_backend.specs.start_event import StartEvent

class MyCustomParser(BpmnDmnParser):  # type: ignore
    """A BPMN and DMN parser that can also parse spiffworkflow-specific extensions."""

    OVERRIDE_PARSER_CLASSES = BpmnDmnParser.OVERRIDE_PARSER_CLASSES
    OVERRIDE_PARSER_CLASSES.update(SpiffBpmnParser.OVERRIDE_PARSER_CLASSES)
+
+    StartEvent.register_parser_class(OVERRIDE_PARSER_CLASSES)

View File

@@ -101,7 +101,11 @@ from spiffworkflow_backend.services.workflow_execution_service import (
from spiffworkflow_backend.services.workflow_execution_service import (
    WorkflowExecutionService,
)
+from spiffworkflow_backend.specs.start_event import (
+    StartEvent,
+)
+
+StartEvent.register_converter(SPIFF_SPEC_CONFIG)
# Sorry about all this crap. I wanted to move this thing to another file, but
# importing a bunch of types causes circular imports.

View File

@@ -46,6 +46,7 @@ from spiffworkflow_backend.services.process_instance_queue_service import (
    ProcessInstanceQueueService,
)
from spiffworkflow_backend.services.process_model_service import ProcessModelService
+from spiffworkflow_backend.services.workflow_service import WorkflowService
class ProcessInstanceService:
@@ -54,6 +55,17 @@ class ProcessInstanceService:
    FILE_DATA_DIGEST_PREFIX = "spifffiledatadigest+"
    TASK_STATE_LOCKED = "locked"

+    @staticmethod
+    def calculate_start_delay_in_seconds(process_instance_model: ProcessInstanceModel) -> int:
+        try:
+            processor = ProcessInstanceProcessor(process_instance_model)
+            delay_in_seconds = WorkflowService.calculate_run_at_delay_in_seconds(
+                processor.bpmn_process_instance, datetime.now(timezone.utc)
+            )
+        except Exception:
+            delay_in_seconds = 0
+        return delay_in_seconds
+
    @classmethod
    def create_process_instance(
        cls,
@@ -77,7 +89,8 @@
        )
        db.session.add(process_instance_model)
        db.session.commit()
-        run_at_in_seconds = round(time.time())
+        delay_in_seconds = cls.calculate_start_delay_in_seconds(process_instance_model)
+        run_at_in_seconds = round(time.time()) + delay_in_seconds
        ProcessInstanceQueueService.enqueue_new_process_instance(process_instance_model, run_at_in_seconds)
        return process_instance_model

View File

@@ -33,7 +33,6 @@ class UserService:
        tenant_specific_field_2: Optional[str] = None,
        tenant_specific_field_3: Optional[str] = None,
    ) -> UserModel:
-        """Create_user."""
        user_model: Optional[UserModel] = (
            UserModel.query.filter(UserModel.service == service).filter(UserModel.service_id == service_id).first()
        )

View File

@@ -0,0 +1,39 @@
"""workflow_service."""
from datetime import datetime
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow # type: ignore
from SpiffWorkflow.task import Task as SpiffTask # type: ignore
from SpiffWorkflow.task import TaskState
from spiffworkflow_backend.specs.start_event import StartEvent
class WorkflowService:
"""WorkflowService."""
@classmethod
def future_start_events(cls, workflow: BpmnWorkflow) -> list[SpiffTask]:
return [t for t in workflow.get_tasks(TaskState.FUTURE) if isinstance(t.task_spec, StartEvent)]
@classmethod
def next_start_event_delay_in_seconds(cls, workflow: BpmnWorkflow, now_in_utc: datetime) -> int:
start_events = cls.future_start_events(workflow)
start_delays: list[int] = []
for start_event in start_events:
start_delay = start_event.task_spec.start_delay_in_seconds(start_event, now_in_utc)
start_delays.append(start_delay)
start_delays.sort()
return start_delays[0] if len(start_delays) > 0 else 0
@classmethod
def calculate_run_at_delay_in_seconds(cls, workflow: BpmnWorkflow, now_in_utc: datetime) -> int:
        # TODO: for now we are using the first start time because I am not sure how multiple
        # start events should work. I think the right answer is to take the earliest start
        # time and have later start events stay FUTURE/WAITING?, then we need to be able
        # to respect the other start events when enqueueing.
        #
        # TODO: this method should also expand to include other FUTURE/WAITING timers when
        # enqueueing so that we don't have to check timers every 10 or whatever seconds.
        # Right now we assume that this is being called to create a process.
return cls.next_start_event_delay_in_seconds(workflow, now_in_utc)
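A hedged usage sketch (the workflow value is assumed to be a BpmnWorkflow parsed elsewhere, e.g. with the workflow_from_fragment helper in the tests below):

import time
from datetime import datetime, timezone

from spiffworkflow_backend.services.workflow_service import WorkflowService

# workflow: a parsed BpmnWorkflow; a start event with a "PT30S" duration timer would yield 30 here.
delay = WorkflowService.calculate_run_at_delay_in_seconds(workflow, datetime.now(timezone.utc))
run_at_in_seconds = round(time.time()) + delay  # the same arithmetic create_process_instance uses above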

View File

@@ -0,0 +1 @@
"""docstring."""

View File

@@ -0,0 +1,62 @@
from datetime import datetime
from typing import Any
from typing import Dict
from SpiffWorkflow.bpmn.parser.util import full_tag # type: ignore
from SpiffWorkflow.bpmn.serializer.task_spec import EventConverter # type: ignore
from SpiffWorkflow.bpmn.serializer.task_spec import StartEventConverter as DefaultStartEventConverter
from SpiffWorkflow.bpmn.specs.defaults import StartEvent as DefaultStartEvent # type: ignore
from SpiffWorkflow.bpmn.specs.event_definitions import CycleTimerEventDefinition # type: ignore
from SpiffWorkflow.bpmn.specs.event_definitions import DurationTimerEventDefinition
from SpiffWorkflow.bpmn.specs.event_definitions import NoneEventDefinition
from SpiffWorkflow.bpmn.specs.event_definitions import TimeDateEventDefinition
from SpiffWorkflow.bpmn.specs.event_definitions import TimerEventDefinition
from SpiffWorkflow.spiff.parser.event_parsers import SpiffStartEventParser # type: ignore
from SpiffWorkflow.task import Task as SpiffTask # type: ignore
# TODO: cycle timers and repeat counts?
class StartEvent(DefaultStartEvent): # type: ignore
def __init__(self, wf_spec, bpmn_id, event_definition, **kwargs): # type: ignore
if isinstance(event_definition, TimerEventDefinition):
super().__init__(wf_spec, bpmn_id, NoneEventDefinition(), **kwargs)
self.timer_definition = event_definition
else:
super().__init__(wf_spec, bpmn_id, event_definition, **kwargs)
self.timer_definition = None
@staticmethod
def register_converter(spec_config: Dict[str, Any]) -> None:
spec_config["task_specs"].remove(DefaultStartEventConverter)
spec_config["task_specs"].append(StartEventConverter)
@staticmethod
def register_parser_class(parser_config: Dict[str, Any]) -> None:
parser_config[full_tag("startEvent")] = (SpiffStartEventParser, StartEvent)
def start_delay_in_seconds(self, my_task: SpiffTask, now_in_utc: datetime) -> int:
script_engine = my_task.workflow.script_engine
evaluated_expression = None
parsed_duration = None
if isinstance(self.timer_definition, TimerEventDefinition) and script_engine is not None:
evaluated_expression = script_engine.evaluate(my_task, self.timer_definition.expression)
if evaluated_expression is not None:
if isinstance(self.timer_definition, TimeDateEventDefinition):
parsed_duration = TimerEventDefinition.parse_time_or_duration(evaluated_expression)
time_delta = parsed_duration - now_in_utc
return time_delta.seconds # type: ignore
elif isinstance(self.timer_definition, DurationTimerEventDefinition):
parsed_duration = TimerEventDefinition.parse_iso_duration(evaluated_expression)
time_delta = TimerEventDefinition.get_timedelta_from_start(parsed_duration, now_in_utc)
return time_delta.seconds # type: ignore
elif isinstance(self.timer_definition, CycleTimerEventDefinition):
return 0
return 0
class StartEventConverter(EventConverter): # type: ignore
def __init__(self, registry): # type: ignore
super().__init__(StartEvent, registry)
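The duration branch above can be exercised directly with the same SpiffWorkflow helpers the class calls (a minimal sketch, assuming these helpers behave as the tests below expect):

from datetime import datetime, timezone

from SpiffWorkflow.bpmn.specs.event_definitions import TimerEventDefinition  # type: ignore

now_in_utc = datetime.now(timezone.utc)
parsed_duration = TimerEventDefinition.parse_iso_duration("PT30S")
time_delta = TimerEventDefinition.get_timedelta_from_start(parsed_duration, now_in_utc)
assert time_delta.seconds == 30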

View File

@@ -0,0 +1,135 @@
"""Test_workflow_service."""
from datetime import datetime
from datetime import timedelta
from datetime import timezone
from typing import Generator
import pytest
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow # type: ignore
from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser # type: ignore
from SpiffWorkflow.spiff.parser.process import SpiffBpmnParser # type: ignore
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from spiffworkflow_backend.services.workflow_service import (
WorkflowService,
)
from spiffworkflow_backend.specs.start_event import StartEvent
BPMN_WRAPPER = """
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL"
xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:dc="http://www.omg.org/spec/DD/20100524/DC"
xmlns:di="http://www.omg.org/spec/DD/20100524/DI"
id="Definitions_96f6665"
targetNamespace="http://bpmn.io/schema/bpmn"
exporter="Camunda Modeler"
exporterVersion="3.0.0-dev"
>
{}
</bpmn:definitions>
"""
@pytest.fixture()
def now_in_utc() -> Generator[datetime, None, None]:
yield datetime.now(timezone.utc)
@pytest.fixture()
def example_start_datetime_in_utc_str() -> Generator[str, None, None]:
yield "2019-10-01T12:00:00+00:00"
@pytest.fixture()
def example_start_datetime_minus_5_mins_in_utc(
example_start_datetime_in_utc_str: str,
) -> Generator[datetime, None, None]:
example_datetime = datetime.fromisoformat(example_start_datetime_in_utc_str)
yield example_datetime - timedelta(minutes=5)
class CustomBpmnDmnParser(BpmnDmnParser): # type: ignore
OVERRIDE_PARSER_CLASSES = {}
OVERRIDE_PARSER_CLASSES.update(BpmnDmnParser.OVERRIDE_PARSER_CLASSES)
OVERRIDE_PARSER_CLASSES.update(SpiffBpmnParser.OVERRIDE_PARSER_CLASSES)
StartEvent.register_parser_class(OVERRIDE_PARSER_CLASSES)
def workflow_from_str(bpmn_str: str, process_id: str) -> BpmnWorkflow:
parser = CustomBpmnDmnParser()
parser.add_bpmn_str(bpmn_str)
top_level = parser.get_spec(process_id)
subprocesses = parser.get_subprocess_specs(process_id)
return BpmnWorkflow(top_level, subprocesses)
def workflow_from_fragment(bpmn_fragment: str, process_id: str) -> BpmnWorkflow:
return workflow_from_str(BPMN_WRAPPER.format(bpmn_fragment), process_id)
class TestWorkflowService(BaseTest):
"""TestWorkflowService."""
def test_run_at_delay_is_0_for_regular_start_events(self, now_in_utc: datetime) -> None:
workflow = workflow_from_fragment(
"""
<bpmn:process id="no_tasks" name="No Tasks" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_184umot</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:endEvent id="Event_0qq9il3">
<bpmn:incoming>Flow_184umot</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_184umot" sourceRef="StartEvent_1" targetRef="Event_0qq9il3" />
</bpmn:process>
""",
"no_tasks",
)
delay = WorkflowService.calculate_run_at_delay_in_seconds(workflow, now_in_utc)
assert delay == 0
def test_run_at_delay_is_30_for_30_second_duration_start_timer_event(self, now_in_utc: datetime) -> None:
workflow = workflow_from_fragment(
"""
<bpmn:process id="Process_aldvgey" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_1x1o335</bpmn:outgoing>
<bpmn:timerEventDefinition id="TimerEventDefinition_1vi6a54">
<bpmn:timeDuration xsi:type="bpmn:tFormalExpression">"PT30S"</bpmn:timeDuration>
</bpmn:timerEventDefinition>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_1x1o335" sourceRef="StartEvent_1" targetRef="Event_0upbokh" />
<bpmn:endEvent id="Event_0upbokh">
<bpmn:incoming>Flow_1x1o335</bpmn:incoming>
</bpmn:endEvent>
</bpmn:process>
""",
"Process_aldvgey",
)
delay = WorkflowService.calculate_run_at_delay_in_seconds(workflow, now_in_utc)
assert delay == 30
def test_run_at_delay_is_300_if_5_mins_before_date_start_timer_event(
self, example_start_datetime_in_utc_str: str, example_start_datetime_minus_5_mins_in_utc: datetime
) -> None:
workflow = workflow_from_fragment(
f"""
<bpmn:process id="Process_aldvgey" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_1x1o335</bpmn:outgoing>
<bpmn:timerEventDefinition id="TimerEventDefinition_1vi6a54">
<bpmn:timeDate xsi:type="bpmn:tFormalExpression">"{example_start_datetime_in_utc_str}"</bpmn:timeDate>
</bpmn:timerEventDefinition>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_1x1o335" sourceRef="StartEvent_1" targetRef="Event_0upbokh" />
<bpmn:endEvent id="Event_0upbokh">
<bpmn:incoming>Flow_1x1o335</bpmn:incoming>
</bpmn:endEvent>
</bpmn:process>
""",
"Process_aldvgey",
)
delay = WorkflowService.calculate_run_at_delay_in_seconds(workflow, example_start_datetime_minus_5_mins_in_utc)
assert delay == 300

View File

@@ -120,15 +120,19 @@ export default function NavigationBar() {
            <a target="_blank" href={documentationUrl} rel="noreferrer">
              Documentation
            </a>
-            <hr />
-            <Button
-              data-qa="logout-button"
-              className="button-link"
-              onClick={handleLogout}
-            >
-              <Logout />
-              &nbsp;&nbsp;Sign out
-            </Button>
+            {!UserService.authenticationDisabled() ? (
+              <>
+                <hr />
+                <Button
+                  data-qa="logout-button"
+                  className="button-link"
+                  onClick={handleLogout}
+                >
+                  <Logout />
+                  &nbsp;&nbsp;Sign out
+                </Button>
+              </>
+            ) : null}
          </ToggletipContent>
        </Toggletip>
      </div>

View File

@@ -62,6 +62,15 @@ const getUserEmail = () => {
  return null;
};

+const authenticationDisabled = () => {
+  const idToken = getIdToken();
+  if (idToken) {
+    const idObject = jwt(idToken);
+    return (idObject as any).authentication_disabled;
+  }
+  return false;
+};
+
const getPreferredUsername = () => {
  const idToken = getIdToken();
  if (idToken) {
@@ -82,14 +91,15 @@ const hasRole = (_roles: string[]): boolean => {
};

const UserService = {
+  authenticationDisabled,
  doLogin,
  doLogout,
-  isLoggedIn,
  getAccessToken,
-  loginIfNeeded,
  getPreferredUsername,
  getUserEmail,
  hasRole,
+  isLoggedIn,
+  loginIfNeeded,
};
export default UserService;