From 3e82ac5f071dc1c51f9f5ed5582e09863f0450ad Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jakub=20Soko=C5=82owski?=
Date: Mon, 30 Jan 2023 16:18:17 +0100
Subject: [PATCH 01/40] backend: avoid redundant steps in Dockerfile
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Use separate `base`, `setup` and `final` stages to avoid redundant steps.
Avoid running `poetry` twice, and add `source` and `description` labels.

Signed-off-by: Jakub Sokołowski
---
 spiffworkflow-backend/Dockerfile | 43 +++++++++++++++++++-------------
 1 file changed, 25 insertions(+), 18 deletions(-)

diff --git a/spiffworkflow-backend/Dockerfile b/spiffworkflow-backend/Dockerfile
index e2d89beb..f05f1973 100644
--- a/spiffworkflow-backend/Dockerfile
+++ b/spiffworkflow-backend/Dockerfile
@@ -1,28 +1,35 @@
-FROM ghcr.io/sartography/python:3.11
+# Base image to share ENV vars that activate VENV.
+FROM ghcr.io/sartography/python:3.11 AS base
+
+ENV VIRTUAL_ENV=/app/venv
+RUN python3 -m venv $VIRTUAL_ENV
+ENV PATH="$VIRTUAL_ENV/bin:$PATH"
+
+WORKDIR /app
+
+# Setup image for installing Python dependencies.
+FROM base AS setup
 
 RUN pip install poetry
 RUN useradd _gunicorn --no-create-home --user-group
 
-RUN apt-get update && \
-    apt-get install -y -q \
-    gcc libssl-dev \
-    curl git-core libpq-dev \
-    gunicorn3 default-mysql-client
+RUN apt-get update \
+ && apt-get install -y -q gcc libssl-dev libpq-dev
 
-WORKDIR /app
-COPY pyproject.toml poetry.lock /app/
+COPY . /app
 RUN poetry install --without dev
 
-RUN set -xe \
- && apt-get remove -y gcc python3-dev libssl-dev \
- && apt-get autoremove -y \
- && apt-get clean -y \
- && rm -rf /var/lib/apt/lists/*
+# Final image without setup dependencies.
+FROM base AS final
 
-COPY . /app/
+LABEL source="https://github.com/sartography/spiff-arena"
+LABEL description="Software development platform for building, running, and monitoring executable diagrams"
 
-# run poetry install again AFTER copying the app into the image
-# otherwise it does not know what the main app module is
-RUN poetry install --without dev
+RUN apt-get update \
+ && apt-get clean -y \
+ && apt-get install -y -q curl git-core gunicorn3 default-mysql-client \
+ && rm -rf /var/lib/apt/lists/*
 
-CMD ./bin/boot_server_in_docker
+COPY --from=setup /app /app
+
+ENTRYPOINT ["./bin/boot_server_in_docker"]

From 60b048b698bab2e824c3eb1aaee835f5c5344eab Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jakub=20Soko=C5=82owski?=
Date: Mon, 30 Jan 2023 16:46:04 +0100
Subject: [PATCH 02/40] frontend: avoid redundant steps in Dockerfile
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Use separate base, setup and final stages to avoid redundant steps.
Also add a default value of `7001` for `PORT0`.

Signed-off-by: Jakub Sokołowski
---
 spiffworkflow-frontend/Dockerfile | 23 +++++++++++++++++------
 1 file changed, 17 insertions(+), 6 deletions(-)

diff --git a/spiffworkflow-frontend/Dockerfile b/spiffworkflow-frontend/Dockerfile
index b64169c2..06755d63 100644
--- a/spiffworkflow-frontend/Dockerfile
+++ b/spiffworkflow-frontend/Dockerfile
@@ -1,5 +1,5 @@
-### STAGE 1: Build ###
-FROM quay.io/sartography/node:latest
+# Base image to share ENV vars that activate VENV.
+FROM quay.io/sartography/node:latest AS base
 
 RUN mkdir /app
 WORKDIR /app
@@ -7,8 +7,10 @@ WORKDIR /app
 # this matches total memory on spiffworkflow-demo
 ENV NODE_OPTIONS=--max_old_space_size=2048
 
-ADD package.json /app/
-ADD package-lock.json /app/
+# Setup image for installing JS dependencies.
+FROM base AS setup
+
+COPY . 
/app/ # npm ci because it respects the lock file. # --ignore-scripts because authors can do bad things in postinstall scripts. @@ -16,8 +18,17 @@ ADD package-lock.json /app/ # npx can-i-ignore-scripts can check that it's safe to ignore scripts. RUN npm ci --ignore-scripts -COPY . /app/ - RUN npm run build +# Final image without setup dependencies. +FROM base AS final + +LABEL source="https://github.com/sartography/spiff-arena" +LABEL description="Software development platform for building, running, and monitoring executable diagrams" + +# WARNING: On localhost frontend assumes backend is one port lowe. +ENV PORT0=7001 + +COPY --from=setup /app/build /app/build + ENTRYPOINT ["/app/bin/boot_server_in_docker"] From 799ea492dded2c02578a72784c4f23cc0a4d1165 Mon Sep 17 00:00:00 2001 From: burnettk Date: Mon, 30 Jan 2023 11:53:35 -0500 Subject: [PATCH 03/40] new mechanism to handle help more in line with how carbon works --- .../carbon/BaseInputTemplate/BaseInputTemplate.tsx | 6 ++++++ .../carbon/FieldHelpTemplate/FieldHelpTemplate.tsx | 10 ++++------ .../src/themes/carbon/SelectWidget/SelectWidget.tsx | 9 ++++++++- 3 files changed, 18 insertions(+), 7 deletions(-) diff --git a/spiffworkflow-frontend/src/themes/carbon/BaseInputTemplate/BaseInputTemplate.tsx b/spiffworkflow-frontend/src/themes/carbon/BaseInputTemplate/BaseInputTemplate.tsx index 7954a0ac..c6f677d2 100644 --- a/spiffworkflow-frontend/src/themes/carbon/BaseInputTemplate/BaseInputTemplate.tsx +++ b/spiffworkflow-frontend/src/themes/carbon/BaseInputTemplate/BaseInputTemplate.tsx @@ -85,6 +85,11 @@ export default function BaseInputTemplate< labelToUse = `${labelToUse}*`; } + let helperText = null; + if (uiSchema && uiSchema['ui:help']) { + helperText = uiSchema['ui:help']; + } + let invalid = false; let errorMessageForField = null; if (rawErrors && rawErrors.length > 0) { @@ -103,6 +108,7 @@ export default function BaseInputTemplate< name={id} className="input" labelText={labelToUse} + helperText={helperText} invalid={invalid} invalidText={errorMessageForField} autoFocus={autofocus} diff --git a/spiffworkflow-frontend/src/themes/carbon/FieldHelpTemplate/FieldHelpTemplate.tsx b/spiffworkflow-frontend/src/themes/carbon/FieldHelpTemplate/FieldHelpTemplate.tsx index 08a61aeb..da4543f2 100644 --- a/spiffworkflow-frontend/src/themes/carbon/FieldHelpTemplate/FieldHelpTemplate.tsx +++ b/spiffworkflow-frontend/src/themes/carbon/FieldHelpTemplate/FieldHelpTemplate.tsx @@ -7,10 +7,8 @@ import FormHelperText from '@mui/material/FormHelperText'; * @param props - The `FieldHelpProps` to be rendered */ export default function FieldHelpTemplate(props: FieldHelpProps) { - const { idSchema, help } = props; - if (!help) { - return null; - } - const id = `${idSchema.$id}__help`; - return {help}; + // ui:help is handled by helperText in all carbon widgets. 
+ // see BaseInputTemplate/BaseInputTemplate.tsx and + // SelectWidget/SelectWidget.tsx + return null; } diff --git a/spiffworkflow-frontend/src/themes/carbon/SelectWidget/SelectWidget.tsx b/spiffworkflow-frontend/src/themes/carbon/SelectWidget/SelectWidget.tsx index 616b11a7..d74b9b7b 100644 --- a/spiffworkflow-frontend/src/themes/carbon/SelectWidget/SelectWidget.tsx +++ b/spiffworkflow-frontend/src/themes/carbon/SelectWidget/SelectWidget.tsx @@ -41,6 +41,10 @@ function SelectWidget({ } else if (schema && schema.title) { labelToUse = schema.title; } + let helperText = null; + if (uiSchema && uiSchema['ui:help']) { + helperText = uiSchema['ui:help']; + } if (required) { labelToUse = `${labelToUse}*`; } @@ -52,13 +56,16 @@ function SelectWidget({ errorMessageForField = `${labelToUse.replace(/\*$/, '')} ${rawErrors[0]}`; } + // maybe use placeholder somehow. it was previously jammed into the helperText field, + // but allowing ui:help to grab that spot seems much more appropriate. + return ( 0) { invalid = true; - errorMessageForField = `${labelToUse.replace(/\*$/, '')} ${rawErrors[0]}`; + errorMessageForField = rawErrors[0]; } return ( From de607ded0f10c02b295ff5226faa7a242a54f822 Mon Sep 17 00:00:00 2001 From: burnettk Date: Mon, 30 Jan 2023 14:21:02 -0500 Subject: [PATCH 07/40] even textareas need to have blank labels since labels are in FieldTemplate --- .../src/themes/carbon/TextareaWidget/TextareaWidget.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spiffworkflow-frontend/src/themes/carbon/TextareaWidget/TextareaWidget.tsx b/spiffworkflow-frontend/src/themes/carbon/TextareaWidget/TextareaWidget.tsx index beb1934e..d6c8dd72 100644 --- a/spiffworkflow-frontend/src/themes/carbon/TextareaWidget/TextareaWidget.tsx +++ b/spiffworkflow-frontend/src/themes/carbon/TextareaWidget/TextareaWidget.tsx @@ -74,7 +74,7 @@ function TextareaWidget< name={id} className="form-control" value={value || ''} - labelText={labelToUse} + labelText="" placeholder={placeholder} required={required} disabled={disabled} From b43f7b6cf5a73ae91c20a9320f5f3069adae0d06 Mon Sep 17 00:00:00 2001 From: burnettk Date: Mon, 30 Jan 2023 15:51:48 -0500 Subject: [PATCH 08/40] make task show wide, and make repeating form icons match site styles. w/ dfunk --- .../src/routes/TaskShow.tsx | 2 +- .../src/themes/carbon/AddButton/AddButton.tsx | 45 +++++--- .../ArrayFieldItemTemplate.tsx | 105 ++++++++++------- .../themes/carbon/IconButton/IconButton.tsx | 107 ++++++++++++------ .../src/themes/carbon/index.css | 4 + 5 files changed, 173 insertions(+), 90 deletions(-) diff --git a/spiffworkflow-frontend/src/routes/TaskShow.tsx b/spiffworkflow-frontend/src/routes/TaskShow.tsx index f08bf904..de7141a1 100644 --- a/spiffworkflow-frontend/src/routes/TaskShow.tsx +++ b/spiffworkflow-frontend/src/routes/TaskShow.tsx @@ -217,7 +217,7 @@ export default function TaskShow() { return ( - +
= ({ - uiSchema, - ...props -}) => { +// @ts-ignore +import { AddAlt } from '@carbon/icons-react'; + +import IconButton from '../IconButton/IconButton'; + +/** The `AddButton` renders a button that represent the `Add` action on a form + */ +export default function AddButton< + T = any, + S extends StrictRJSFSchema = RJSFSchema, + F extends FormContextType = any +>({ className, onClick, disabled, registry }: IconButtonProps) { return ( - - - +
+    <IconButton
+      className={className}
+      onClick={onClick}
+      disabled={disabled}
+      registry={registry}
+    />
); -}; - -export default AddButton; +} diff --git a/spiffworkflow-frontend/src/themes/carbon/ArrayFieldItemTemplate/ArrayFieldItemTemplate.tsx b/spiffworkflow-frontend/src/themes/carbon/ArrayFieldItemTemplate/ArrayFieldItemTemplate.tsx index cf596f98..df8896bf 100644 --- a/spiffworkflow-frontend/src/themes/carbon/ArrayFieldItemTemplate/ArrayFieldItemTemplate.tsx +++ b/spiffworkflow-frontend/src/themes/carbon/ArrayFieldItemTemplate/ArrayFieldItemTemplate.tsx @@ -5,6 +5,11 @@ import { RJSFSchema, StrictRJSFSchema, } from '@rjsf/utils'; +import { + Grid, + Column, + // @ts-ignore +} from '@carbon/react'; /** The `ArrayFieldItemTemplate` component is the template used to render an items of an array. * @@ -33,53 +38,67 @@ export default function ArrayFieldItemTemplate< const { MoveDownButton, MoveUpButton, RemoveButton } = registry.templates.ButtonTemplates; const btnStyle: CSSProperties = { - flex: 1, - paddingLeft: 6, - paddingRight: 6, - fontWeight: 'bold', + marginBottom: '0.5em', }; + const mainColumnWidthSmall = hasToolbar ? 3 : 4; + const mainColumnWidthMedium = hasToolbar ? 6 : 8; + const mainColumnWidthLarge = hasToolbar ? 12 : 16; return (
-
{children}
- {hasToolbar && ( -
-
+ + {children} + + {hasToolbar && ( + - {(hasMoveUp || hasMoveDown) && ( - - )} - {(hasMoveUp || hasMoveDown) && ( - - )} - {hasRemove && ( - - )} -
-
- )} +
+
+
+ {(hasMoveUp || hasMoveDown) && ( + + )} +
+
+ {(hasMoveUp || hasMoveDown) && ( + + )} +
+
+ {hasRemove && ( + + )} +
+
+
+ + )} +
  );
}

diff --git a/spiffworkflow-frontend/src/themes/carbon/IconButton/IconButton.tsx b/spiffworkflow-frontend/src/themes/carbon/IconButton/IconButton.tsx
index 82559106..64eb883d 100644
--- a/spiffworkflow-frontend/src/themes/carbon/IconButton/IconButton.tsx
+++ b/spiffworkflow-frontend/src/themes/carbon/IconButton/IconButton.tsx
@@ -1,55 +1,96 @@
 import React from 'react';
-import IconButton, {
-  IconButtonProps as MuiIconButtonProps,
-} from '@mui/material/IconButton';
-import ArrowDownwardIcon from '@mui/icons-material/ArrowDownward';
-import ArrowUpwardIcon from '@mui/icons-material/ArrowUpward';
-import RemoveIcon from '@mui/icons-material/Remove';
-import { IconButtonProps } from '@rjsf/utils';
+import {
+  FormContextType,
+  IconButtonProps,
+  RJSFSchema,
+  StrictRJSFSchema,
+} from '@rjsf/utils';
 
-export default function MuiIconButton(props: IconButtonProps) {
-  const { icon, color, uiSchema, ...otherProps } = props;
+// @ts-ignore
+import { Add, TrashCan, ArrowUp, ArrowDown } from '@carbon/icons-react';
+
+export default function IconButton<
+  T = any,
+  S extends StrictRJSFSchema = RJSFSchema,
+  F extends FormContextType = any
+>(props: IconButtonProps) {
+  const {
+    iconType = 'default',
+    icon,
+    className,
+    uiSchema,
+    registry,
+    ...otherProps
+  } = props;
+  // icon string options: plus, remove, arrow-up, arrow-down
+  let carbonIcon = (
+    <>
+      <Add /> Add new
+    </>
+ ); + if (icon === 'remove') { + carbonIcon = ; + } + if (icon === 'arrow-up') { + carbonIcon = ; + } + if (icon === 'arrow-down') { + carbonIcon = ; + } + + return ( + + ); +} + +export function MoveDownButton< + T = any, + S extends StrictRJSFSchema = RJSFSchema, + F extends FormContextType = any +>(props: IconButtonProps) { return ( - {icon} - - ); -} - -export function MoveDownButton(props: IconButtonProps) { - return ( - } + icon="arrow-down" /> ); } -export function MoveUpButton(props: IconButtonProps) { +export function MoveUpButton< + T = any, + S extends StrictRJSFSchema = RJSFSchema, + F extends FormContextType = any +>(props: IconButtonProps) { return ( - } + icon="arrow-up" /> ); } -export function RemoveButton(props: IconButtonProps) { - const { iconType, ...otherProps } = props; +export function RemoveButton< + T = any, + S extends StrictRJSFSchema = RJSFSchema, + F extends FormContextType = any +>(props: IconButtonProps) { return ( - - } + className="array-item-remove" + {...props} + iconType="danger" + icon="remove" /> ); } diff --git a/spiffworkflow-frontend/src/themes/carbon/index.css b/spiffworkflow-frontend/src/themes/carbon/index.css index 72cdbb84..b536e7d2 100644 --- a/spiffworkflow-frontend/src/themes/carbon/index.css +++ b/spiffworkflow-frontend/src/themes/carbon/index.css @@ -17,3 +17,7 @@ .rjsf .rjsf-field .rjsf-field { margin-bottom: 2em; } + +.array-item-toolbox { + margin-left: 2em; +} From a4e4fdbd44e614df4bd01d3731c940466ccacf2a Mon Sep 17 00:00:00 2001 From: Dan Date: Mon, 30 Jan 2023 16:43:47 -0500 Subject: [PATCH 09/40] IBM says you can't have more columns than your parents, even if you try to start another grid, with kburnett --- spiffworkflow-frontend/src/routes/TaskShow.tsx | 2 +- .../ArrayFieldItemTemplate.tsx | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/spiffworkflow-frontend/src/routes/TaskShow.tsx b/spiffworkflow-frontend/src/routes/TaskShow.tsx index de7141a1..83e5df3f 100644 --- a/spiffworkflow-frontend/src/routes/TaskShow.tsx +++ b/spiffworkflow-frontend/src/routes/TaskShow.tsx @@ -217,7 +217,7 @@ export default function TaskShow() { return ( - + @@ -55,9 +55,9 @@ export default function ArrayFieldItemTemplate< {hasToolbar && (
From 797ccbad5f656efdc9ff2d3dddcf47f8d554a985 Mon Sep 17 00:00:00 2001 From: burnettk Date: Mon, 30 Jan 2023 16:47:20 -0500 Subject: [PATCH 10/40] remove unneeded divs --- .../ArrayFieldItemTemplate.tsx | 66 ++++++++----------- 1 file changed, 28 insertions(+), 38 deletions(-) diff --git a/spiffworkflow-frontend/src/themes/carbon/ArrayFieldItemTemplate/ArrayFieldItemTemplate.tsx b/spiffworkflow-frontend/src/themes/carbon/ArrayFieldItemTemplate/ArrayFieldItemTemplate.tsx index 1a732a1c..cf1d2e35 100644 --- a/spiffworkflow-frontend/src/themes/carbon/ArrayFieldItemTemplate/ArrayFieldItemTemplate.tsx +++ b/spiffworkflow-frontend/src/themes/carbon/ArrayFieldItemTemplate/ArrayFieldItemTemplate.tsx @@ -54,46 +54,36 @@ export default function ArrayFieldItemTemplate< {children} {hasToolbar && ( - +
-
- {(hasMoveUp || hasMoveDown) && ( - - )} -
-
- {(hasMoveUp || hasMoveDown) && ( - - )} -
-
- {hasRemove && ( - - )} -
+ {(hasMoveUp || hasMoveDown) && ( + + )} + {(hasMoveUp || hasMoveDown) && ( + + )} + {hasRemove && ( + + )}
From 2152b3f5a62ce14d54972a8c2944a873186491e1 Mon Sep 17 00:00:00 2001 From: Dan Date: Mon, 30 Jan 2023 16:50:43 -0500 Subject: [PATCH 11/40] Fix typing issue. --- spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py index 50c6f88a..6873198a 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py @@ -255,7 +255,7 @@ def parse_id_token(token: str) -> Any: return json.loads(decoded) -def login_return(code: str, state: str, session_state: str = None) -> Optional[Response]: +def login_return(code: str, state: str, session_state: str = "") -> Optional[Response]: """Login_return.""" state_dict = ast.literal_eval(base64.b64decode(state).decode("utf-8")) state_redirect_url = state_dict["redirect_url"] From 9731d79ceb53cefc7ef4c904214a0bac41a8613a Mon Sep 17 00:00:00 2001 From: burnettk Date: Mon, 30 Jan 2023 17:44:15 -0500 Subject: [PATCH 12/40] get bin as well for script --- spiffworkflow-frontend/Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/spiffworkflow-frontend/Dockerfile b/spiffworkflow-frontend/Dockerfile index 06755d63..f06ade93 100644 --- a/spiffworkflow-frontend/Dockerfile +++ b/spiffworkflow-frontend/Dockerfile @@ -30,5 +30,6 @@ LABEL description="Software development platform for building, running, and moni ENV PORT0=7001 COPY --from=setup /app/build /app/build +COPY --from=setup /app/bin /app/bin ENTRYPOINT ["/app/bin/boot_server_in_docker"] From 22897abcb89e24468dcc6d35a998a0a394b861d7 Mon Sep 17 00:00:00 2001 From: burnettk Date: Mon, 30 Jan 2023 18:01:56 -0500 Subject: [PATCH 13/40] revert Dockerfile until we get it working --- spiffworkflow-frontend/Dockerfile | 24 ++++++------------------ 1 file changed, 6 insertions(+), 18 deletions(-) diff --git a/spiffworkflow-frontend/Dockerfile b/spiffworkflow-frontend/Dockerfile index f06ade93..b64169c2 100644 --- a/spiffworkflow-frontend/Dockerfile +++ b/spiffworkflow-frontend/Dockerfile @@ -1,5 +1,5 @@ -# Base image to share ENV vars that activate VENV. -FROM quay.io/sartography/node:latest AS base +### STAGE 1: Build ### +FROM quay.io/sartography/node:latest RUN mkdir /app WORKDIR /app @@ -7,10 +7,8 @@ WORKDIR /app # this matches total memory on spiffworkflow-demo ENV NODE_OPTIONS=--max_old_space_size=2048 -# Setup image for installing JS dependencies. -FROM base AS setup - -COPY . /app/ +ADD package.json /app/ +ADD package-lock.json /app/ # npm ci because it respects the lock file. # --ignore-scripts because authors can do bad things in postinstall scripts. @@ -18,18 +16,8 @@ COPY . /app/ # npx can-i-ignore-scripts can check that it's safe to ignore scripts. RUN npm ci --ignore-scripts +COPY . /app/ + RUN npm run build -# Final image without setup dependencies. -FROM base AS final - -LABEL source="https://github.com/sartography/spiff-arena" -LABEL description="Software development platform for building, running, and monitoring executable diagrams" - -# WARNING: On localhost frontend assumes backend is one port lowe. 
-ENV PORT0=7001 - -COPY --from=setup /app/build /app/build -COPY --from=setup /app/bin /app/bin - ENTRYPOINT ["/app/bin/boot_server_in_docker"] From fdc2d80af0613f7e4be568770811406a4038decb Mon Sep 17 00:00:00 2001 From: burnettk Date: Mon, 30 Jan 2023 18:02:59 -0500 Subject: [PATCH 14/40] Revert "revert Dockerfile until we get it working" This reverts commit 22897abcb89e24468dcc6d35a998a0a394b861d7. --- spiffworkflow-frontend/Dockerfile | 24 ++++++++++++++++++------ 1 file changed, 18 insertions(+), 6 deletions(-) diff --git a/spiffworkflow-frontend/Dockerfile b/spiffworkflow-frontend/Dockerfile index b64169c2..f06ade93 100644 --- a/spiffworkflow-frontend/Dockerfile +++ b/spiffworkflow-frontend/Dockerfile @@ -1,5 +1,5 @@ -### STAGE 1: Build ### -FROM quay.io/sartography/node:latest +# Base image to share ENV vars that activate VENV. +FROM quay.io/sartography/node:latest AS base RUN mkdir /app WORKDIR /app @@ -7,8 +7,10 @@ WORKDIR /app # this matches total memory on spiffworkflow-demo ENV NODE_OPTIONS=--max_old_space_size=2048 -ADD package.json /app/ -ADD package-lock.json /app/ +# Setup image for installing JS dependencies. +FROM base AS setup + +COPY . /app/ # npm ci because it respects the lock file. # --ignore-scripts because authors can do bad things in postinstall scripts. @@ -16,8 +18,18 @@ ADD package-lock.json /app/ # npx can-i-ignore-scripts can check that it's safe to ignore scripts. RUN npm ci --ignore-scripts -COPY . /app/ - RUN npm run build +# Final image without setup dependencies. +FROM base AS final + +LABEL source="https://github.com/sartography/spiff-arena" +LABEL description="Software development platform for building, running, and monitoring executable diagrams" + +# WARNING: On localhost frontend assumes backend is one port lowe. +ENV PORT0=7001 + +COPY --from=setup /app/build /app/build +COPY --from=setup /app/bin /app/bin + ENTRYPOINT ["/app/bin/boot_server_in_docker"] From 4c47f0b71102a481bc3c5a080c2c1660322a63c8 Mon Sep 17 00:00:00 2001 From: burnettk Date: Mon, 30 Jan 2023 18:40:03 -0500 Subject: [PATCH 15/40] allow overriding git related configs w/ env var and log permissions stuff on boot --- .../src/spiffworkflow_backend/config/__init__.py | 10 ++++++++-- .../src/spiffworkflow_backend/config/default.py | 2 ++ 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/__init__.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/__init__.py index f9f19571..d7afbeb9 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/__init__.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/__init__.py @@ -82,13 +82,19 @@ def setup_config(app: Flask) -> None: app.config.from_pyfile(f"{app.instance_path}/config.py", silent=True) app.config["PERMISSIONS_FILE_FULLPATH"] = None - if app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"]: + permissions_file_name = app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"] + if permissions_file_name is not None: app.config["PERMISSIONS_FILE_FULLPATH"] = os.path.join( app.root_path, "config", "permissions", - app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"], + permissions_file_name, ) + print(f"base_permissions: loaded permissions file: {permissions_file_name}") + else: + print("base_permissions: no permissions file loaded") + + # unversioned (see .gitignore) config that can override everything and include secrets. 
# src/spiffworkflow_backend/config/secrets.py diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py index 252b2b89..4f0a8296 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py @@ -69,6 +69,8 @@ GIT_BRANCH = environ.get("GIT_BRANCH") GIT_CLONE_URL_FOR_PUBLISHING = environ.get("GIT_CLONE_URL") GIT_COMMIT_ON_SAVE = environ.get("GIT_COMMIT_ON_SAVE", default="false") == "true" GIT_SSH_PRIVATE_KEY = environ.get("GIT_SSH_PRIVATE_KEY") +GIT_USERNAME = environ.get("GIT_USERNAME") +GIT_USER_EMAIL = environ.get("GIT_USER_EMAIL") # Datbase Configuration SPIFF_DATABASE_TYPE = environ.get( From cce1a2246c39698ee5819a6a4067ce53ab2d6e3b Mon Sep 17 00:00:00 2001 From: burnettk Date: Mon, 30 Jan 2023 22:35:24 -0500 Subject: [PATCH 16/40] add deps for serve --- spiffworkflow-frontend/.dockerignore | 1 + spiffworkflow-frontend/Dockerfile | 7 ++++ spiffworkflow-frontend/package.justserve.json | 36 +++++++++++++++++++ 3 files changed, 44 insertions(+) create mode 100644 spiffworkflow-frontend/.dockerignore create mode 100644 spiffworkflow-frontend/package.justserve.json diff --git a/spiffworkflow-frontend/.dockerignore b/spiffworkflow-frontend/.dockerignore new file mode 100644 index 00000000..07e6e472 --- /dev/null +++ b/spiffworkflow-frontend/.dockerignore @@ -0,0 +1 @@ +/node_modules diff --git a/spiffworkflow-frontend/Dockerfile b/spiffworkflow-frontend/Dockerfile index f06ade93..c39c20dc 100644 --- a/spiffworkflow-frontend/Dockerfile +++ b/spiffworkflow-frontend/Dockerfile @@ -12,6 +12,12 @@ FROM base AS setup COPY . /app/ +RUN cp /app/package.json /app/package.json.bak +ADD package.justserve.json /app/package.json +RUN npm ci --ignore-scripts +RUN cp -r /app/node_modules /app/node_modules.justserve +RUN cp /app/package.json.bak /app/package.json + # npm ci because it respects the lock file. # --ignore-scripts because authors can do bad things in postinstall scripts. 
# https://cheatsheetseries.owasp.org/cheatsheets/NPM_Security_Cheat_Sheet.html @@ -31,5 +37,6 @@ ENV PORT0=7001 COPY --from=setup /app/build /app/build COPY --from=setup /app/bin /app/bin +COPY --from=setup /app/node_modules.justserve /app/node_modules ENTRYPOINT ["/app/bin/boot_server_in_docker"] diff --git a/spiffworkflow-frontend/package.justserve.json b/spiffworkflow-frontend/package.justserve.json new file mode 100644 index 00000000..d78004a3 --- /dev/null +++ b/spiffworkflow-frontend/package.justserve.json @@ -0,0 +1,36 @@ +{ + "name": "spiffworkflow-frontend", + "version": "0.1.0", + "private": true, + "dependencies": { + "serve": "^14.0.0" + }, + "scripts": { + "start": "ESLINT_NO_DEV_ERRORS=true PORT=7001 craco start", + "build": "craco build", + "test": "react-scripts test --coverage", + "t": "npm test -- --watchAll=false", + "eject": "craco eject", + "format": "prettier --write src/**/*.[tj]s{,x}", + "lint": "./node_modules/.bin/eslint src", + "lint:fix": "./node_modules/.bin/eslint --fix src" + }, + "eslintConfig": { + "extends": [ + "react-app", + "react-app/jest" + ] + }, + "browserslist": { + "production": [ + ">0.2%", + "not dead", + "not op_mini all" + ], + "development": [ + "last 1 chrome version", + "last 1 firefox version", + "last 1 safari version" + ] + } +} From fb123e12e1298539353acf39f7b9e605d86d476a Mon Sep 17 00:00:00 2001 From: burnettk Date: Tue, 31 Jan 2023 09:27:02 -0500 Subject: [PATCH 17/40] less annoying file name for autocomplete --- spiffworkflow-frontend/Dockerfile | 2 +- .../{package.justserve.json => justservewebserver.package.json} | 0 2 files changed, 1 insertion(+), 1 deletion(-) rename spiffworkflow-frontend/{package.justserve.json => justservewebserver.package.json} (100%) diff --git a/spiffworkflow-frontend/Dockerfile b/spiffworkflow-frontend/Dockerfile index c39c20dc..c777a268 100644 --- a/spiffworkflow-frontend/Dockerfile +++ b/spiffworkflow-frontend/Dockerfile @@ -13,7 +13,7 @@ FROM base AS setup COPY . /app/ RUN cp /app/package.json /app/package.json.bak -ADD package.justserve.json /app/package.json +ADD justservewebserver.package.json /app/package.json RUN npm ci --ignore-scripts RUN cp -r /app/node_modules /app/node_modules.justserve RUN cp /app/package.json.bak /app/package.json diff --git a/spiffworkflow-frontend/package.justserve.json b/spiffworkflow-frontend/justservewebserver.package.json similarity index 100% rename from spiffworkflow-frontend/package.justserve.json rename to spiffworkflow-frontend/justservewebserver.package.json From 0e94a4f4cf2b4880d5a8c6e7a1b5a535a90355a2 Mon Sep 17 00:00:00 2001 From: burnettk Date: Tue, 31 Jan 2023 16:14:22 -0500 Subject: [PATCH 18/40] shuffle around Dockerfile to allow to work for background container --- spiffworkflow-backend/Dockerfile | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/spiffworkflow-backend/Dockerfile b/spiffworkflow-backend/Dockerfile index f05f1973..f4a8f8ec 100644 --- a/spiffworkflow-backend/Dockerfile +++ b/spiffworkflow-backend/Dockerfile @@ -7,6 +7,14 @@ ENV PATH="$VIRTUAL_ENV/bin:$PATH" WORKDIR /app +# base plus packages needed for deployment. Could just install these in final, but then we can't cache as much. +FROM base AS deployment + +RUN apt-get update \ + && apt-get clean -y \ + && apt-get install -y -q curl git-core gunicorn3 default-mysql-client \ + && rm -rf /var/lib/apt/lists/* + # Setup image for installing Python dependencies. FROM base AS setup @@ -20,16 +28,11 @@ COPY . 
/app RUN poetry install --without dev # Final image without setup dependencies. -FROM base AS final +FROM deployment AS final LABEL source="https://github.com/sartography/spiff-arena" LABEL description="Software development platform for building, running, and monitoring executable diagrams" -RUN apt-get update \ - && apt-get clean -y \ - && apt-get install -y -q curl git-core gunicorn3 default-mysql-client \ - && rm -rf /var/lib/apt/lists/* - COPY --from=setup /app /app -ENTRYPOINT ["./bin/boot_server_in_docker"] +CMD ["./bin/boot_server_in_docker"] From adcb841214088ccf6e1b717db560aba1f95e43d6 Mon Sep 17 00:00:00 2001 From: burnettk Date: Tue, 31 Jan 2023 16:32:04 -0500 Subject: [PATCH 19/40] remove duplicate label on radio buttons --- .../src/themes/carbon/RadioWidget/RadioWidget.tsx | 3 --- 1 file changed, 3 deletions(-) diff --git a/spiffworkflow-frontend/src/themes/carbon/RadioWidget/RadioWidget.tsx b/spiffworkflow-frontend/src/themes/carbon/RadioWidget/RadioWidget.tsx index 86dad81e..2f65bca9 100644 --- a/spiffworkflow-frontend/src/themes/carbon/RadioWidget/RadioWidget.tsx +++ b/spiffworkflow-frontend/src/themes/carbon/RadioWidget/RadioWidget.tsx @@ -32,9 +32,6 @@ const RadioWidget = ({ return ( <> - - {label || schema.title} - Date: Tue, 31 Jan 2023 17:11:11 -0500 Subject: [PATCH 20/40] folks who can start instances can also view their logs --- .../services/authorization_service.py | 8 +++++++- .../scripts/test_get_all_permissions.py | 5 +++++ .../unit/test_authorization_service.py | 8 ++++++++ 3 files changed, 20 insertions(+), 1 deletion(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py index 793a3f9b..19f9f418 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py @@ -551,7 +551,9 @@ class AuthorizationService: permissions_to_assign: list[PermissionToAssign] = [] - # we were thinking that if you can start an instance, you ought to be able to view your own instances. + # we were thinking that if you can start an instance, you ought to be able to: + # 1. view your own instances. + # 2. view the logs for these instances. 
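+        # for example, granting "start" on a process model now also yields
+        # read access to /logs/{process_related_path_segment}, alongside the
+        # existing instance permissions (concrete expansions are in the
+        # updated tests below).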
if permission_set == "start": target_uri = f"/process-instances/{process_related_path_segment}" permissions_to_assign.append( @@ -561,6 +563,10 @@ class AuthorizationService: permissions_to_assign.append( PermissionToAssign(permission="read", target_uri=target_uri) ) + target_uri = f"/logs/{process_related_path_segment}" + permissions_to_assign.append( + PermissionToAssign(permission="read", target_uri=target_uri) + ) else: if permission_set == "all": diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_all_permissions.py b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_all_permissions.py index b31c7228..95d15fbf 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_all_permissions.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_all_permissions.py @@ -41,6 +41,11 @@ class TestGetAllPermissions(BaseTest): ) expected_permissions = [ + { + "group_identifier": "my_test_group", + "uri": "/logs/hey:group:*", + "permissions": ["read"], + }, { "group_identifier": "my_test_group", "uri": "/process-instances/hey:group:*", diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_authorization_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_authorization_service.py index 2736693e..d414616c 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_authorization_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_authorization_service.py @@ -197,6 +197,10 @@ class TestAuthorizationService(BaseTest): ) -> None: """Test_explode_permissions_start_on_process_group.""" expected_permissions = [ + ( + "/logs/some-process-group:some-process-model:*", + "read", + ), ( "/process-instances/for-me/some-process-group:some-process-model:*", "read", @@ -255,6 +259,10 @@ class TestAuthorizationService(BaseTest): ) -> None: """Test_explode_permissions_start_on_process_model.""" expected_permissions = [ + ( + "/logs/some-process-group:some-process-model/*", + "read", + ), ( "/process-instances/for-me/some-process-group:some-process-model/*", "read", From 3abe82af4ac9f4295d57be0d352b90b907ce093c Mon Sep 17 00:00:00 2001 From: burnettk Date: Tue, 31 Jan 2023 22:30:15 -0500 Subject: [PATCH 21/40] more sentry performance tracing --- .../src/spiffworkflow_backend/__init__.py | 28 +++++++++++++++++ .../spiffworkflow_backend/config/__init__.py | 2 -- .../routes/tasks_controller.py | 30 +++++++++++++++++-- 3 files changed, 56 insertions(+), 4 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py b/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py index 46f82581..341cfac8 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py @@ -157,6 +157,29 @@ def get_hacked_up_app_for_script() -> flask.app.Flask: return app +def traces_sampler(sampling_context: Any) -> Any: + # always inherit + if sampling_context["parent_sampled"] is not None: + return sampling_context["parent_sampled"] + + if "wsgi_environ" in sampling_context: + wsgi_environ = sampling_context["wsgi_environ"] + path_info = wsgi_environ.get("PATH_INFO") + request_method = wsgi_environ.get("REQUEST_METHOD") + + # tasks_controller.task_submit + # this is the current pain point as of 31 jan 2023. 
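+        # e.g. a form submission arrives here as a PUT to a path like
+        # /v1.0/tasks/42/some-task-guid (ids made up for illustration),
+        # matches the check below, and is always traced; everything else
+        # falls through to the 1% default at the bottom of this function.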
+ if ( + path_info + and path_info.startswith("/v1.0/tasks/") + and request_method == "PUT" + ): + return 1 + + # Default sample rate for all others (replaces traces_sample_rate) + return 0.01 + + def configure_sentry(app: flask.app.Flask) -> None: """Configure_sentry.""" import sentry_sdk @@ -193,5 +216,10 @@ def configure_sentry(app: flask.app.Flask) -> None: # of transactions for performance monitoring. # We recommend adjusting this value to less than 1(00%) in production. traces_sample_rate=float(sentry_traces_sample_rate), + traces_sampler=traces_sampler, + # The profiles_sample_rate setting is relative to the traces_sample_rate setting. + _experiments={ + "profiles_sample_rate": 1, + }, before_send=before_send, ) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/__init__.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/__init__.py index d7afbeb9..64c7e2c1 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/__init__.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/__init__.py @@ -94,8 +94,6 @@ def setup_config(app: Flask) -> None: else: print("base_permissions: no permissions file loaded") - - # unversioned (see .gitignore) config that can override everything and include secrets. # src/spiffworkflow_backend/config/secrets.py app.config.from_pyfile(os.path.join("config", "secrets.py"), silent=True) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py index 8ee9f53d..fcc0dba0 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py @@ -10,6 +10,7 @@ from typing import Union import flask.wrappers import jinja2 +import sentry_sdk from flask import current_app from flask import g from flask import jsonify @@ -326,13 +327,12 @@ def process_data_show( ) -def task_submit( +def task_submit_shared( process_instance_id: int, task_id: str, body: Dict[str, Any], terminate_loop: bool = False, ) -> flask.wrappers.Response: - """Task_submit_user_data.""" principal = _find_principal_or_raise() process_instance = _find_process_instance_by_id_or_raise(process_instance_id) if not process_instance.can_submit_task(): @@ -417,6 +417,32 @@ def task_submit( return Response(json.dumps({"ok": True}), status=202, mimetype="application/json") +def task_submit( + process_instance_id: int, + task_id: str, + body: Dict[str, Any], + terminate_loop: bool = False, +) -> flask.wrappers.Response: + """Task_submit_user_data.""" + sentry_op = "controller_action" + sentry_transaction_name = "tasks_controller.task_submit" + transaction = sentry_sdk.Hub.current.scope.transaction + if transaction is None: + current_app.logger.debug( + "transaction was None. pretty sure this never happens." 
+ ) + with sentry_sdk.start_transaction(op=sentry_op, name=sentry_transaction_name): + return task_submit_shared( + process_instance_id, task_id, body, terminate_loop + ) + else: + current_app.logger.debug("transaction existed.") + with transaction.start_child(op=sentry_op, description=sentry_transaction_name): + return task_submit_shared( + process_instance_id, task_id, body, terminate_loop + ) + + def _get_tasks( processes_started_by_user: bool = True, has_lane_assignment_id: bool = True, From f58be8fe91844175b43f46356798612d29fe30ee Mon Sep 17 00:00:00 2001 From: burnettk Date: Wed, 1 Feb 2023 07:45:48 -0500 Subject: [PATCH 22/40] more spans to track performance --- .../routes/tasks_controller.py | 25 +++++++++---------- .../services/process_instance_service.py | 6 +++-- 2 files changed, 16 insertions(+), 15 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py index fcc0dba0..feb9218e 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py @@ -380,15 +380,16 @@ def task_submit_shared( ) ) - processor.lock_process_instance("Web") - ProcessInstanceService.complete_form_task( - processor=processor, - spiff_task=spiff_task, - data=body, - user=g.user, - human_task=human_task, - ) - processor.unlock_process_instance("Web") + with sentry_sdk.start_span(op="task", description="complete_form_task"): + processor.lock_process_instance("Web") + ProcessInstanceService.complete_form_task( + processor=processor, + spiff_task=spiff_task, + data=body, + user=g.user, + human_task=human_task, + ) + processor.unlock_process_instance("Web") # If we need to update all tasks, then get the next ready task and if it a multi-instance with the same # task spec, complete that form as well. @@ -428,15 +429,13 @@ def task_submit( sentry_transaction_name = "tasks_controller.task_submit" transaction = sentry_sdk.Hub.current.scope.transaction if transaction is None: - current_app.logger.debug( - "transaction was None. pretty sure this never happens." - ) + current_app.logger.info("transaction was None. pretty sure this never happens.") with sentry_sdk.start_transaction(op=sentry_op, name=sentry_transaction_name): return task_submit_shared( process_instance_id, task_id, body, terminate_loop ) else: - current_app.logger.debug("transaction existed.") + current_app.logger.info("transaction existed.") with transaction.start_child(op=sentry_op, description=sentry_transaction_name): return task_submit_shared( process_instance_id, task_id, body, terminate_loop diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py index 9b07ce1f..63c53a21 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py @@ -4,6 +4,7 @@ from typing import Any from typing import List from typing import Optional +import sentry_sdk from flask import current_app from SpiffWorkflow.task import Task as SpiffTask # type: ignore @@ -234,8 +235,9 @@ class ProcessInstanceService: # ProcessInstanceService.post_process_form(spiff_task) # some properties may update the data store. 
processor.complete_task(spiff_task, human_task, user=user) - # maybe move this out once we have the interstitial page since this is here just so we can get the next human task - processor.do_engine_steps(save=True) + with sentry_sdk.start_span(op="task", description="backend_do_engine_steps"): + # maybe move this out once we have the interstitial page since this is here just so we can get the next human task + processor.do_engine_steps(save=True) @staticmethod def extract_form_data(latest_data: dict, task: SpiffTask) -> dict: From 48781039c77834f526fe84bf4f3d9464464c5d48 Mon Sep 17 00:00:00 2001 From: burnettk Date: Wed, 1 Feb 2023 07:53:35 -0500 Subject: [PATCH 23/40] avoid poetry installing deps when we have them cached if they do not change --- spiffworkflow-backend/Dockerfile | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/spiffworkflow-backend/Dockerfile b/spiffworkflow-backend/Dockerfile index f4a8f8ec..d7a4b034 100644 --- a/spiffworkflow-backend/Dockerfile +++ b/spiffworkflow-backend/Dockerfile @@ -24,6 +24,11 @@ RUN useradd _gunicorn --no-create-home --user-group RUN apt-get update \ && apt-get install -y -q gcc libssl-dev libpq-dev +# poetry install takes a long time and can be cached if dependencies don't change, +# so that's why we tolerate running it twice. +COPY pyproject.toml poetry.lock /app/ +RUN poetry install --without dev + COPY . /app RUN poetry install --without dev From e460325e600013f9c83ab64ee75587a79964926e Mon Sep 17 00:00:00 2001 From: burnettk Date: Wed, 1 Feb 2023 13:30:45 -0500 Subject: [PATCH 24/40] get some more insight into connector proxy timings --- .../services/service_task_service.py | 33 ++++++++++--------- 1 file changed, 17 insertions(+), 16 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/service_task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/service_task_service.py index 674ad54d..c5401104 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/service_task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/service_task_service.py @@ -3,6 +3,7 @@ import json from typing import Any import requests +import sentry_sdk from flask import current_app from flask import g @@ -45,27 +46,27 @@ class ServiceTaskDelegate: @staticmethod def call_connector(name: str, bpmn_params: Any, task_data: Any) -> str: """Calls a connector via the configured proxy.""" - params = { - k: ServiceTaskDelegate.check_prefixes(v["value"]) - for k, v in bpmn_params.items() - } - params["spiff__task_data"] = task_data + call_url = f"{connector_proxy_url()}/v1/do/{name}" + with sentry_sdk.start_transaction(op="call-connector", name=call_url): + params = { + k: ServiceTaskDelegate.check_prefixes(v["value"]) + for k, v in bpmn_params.items() + } + params["spiff__task_data"] = task_data - proxied_response = requests.post( - f"{connector_proxy_url()}/v1/do/{name}", json=params - ) + proxied_response = requests.post(call_url, json=params) - parsed_response = json.loads(proxied_response.text) + parsed_response = json.loads(proxied_response.text) - if "refreshed_token_set" not in parsed_response: - return proxied_response.text + if "refreshed_token_set" not in parsed_response: + return proxied_response.text - secret_key = parsed_response["auth"] - refreshed_token_set = json.dumps(parsed_response["refreshed_token_set"]) - user_id = g.user.id if UserService.has_user() else None - SecretService().update_secret(secret_key, refreshed_token_set, user_id) + secret_key = 
parsed_response["auth"] + refreshed_token_set = json.dumps(parsed_response["refreshed_token_set"]) + user_id = g.user.id if UserService.has_user() else None + SecretService().update_secret(secret_key, refreshed_token_set, user_id) - return json.dumps(parsed_response["api_response"]) + return json.dumps(parsed_response["api_response"]) class ServiceTaskService: From c669aeff2733b7f028a78228d4f5488da1831268 Mon Sep 17 00:00:00 2001 From: burnettk Date: Wed, 1 Feb 2023 13:44:12 -0500 Subject: [PATCH 25/40] there is no need to ever sentry_sdk.start_transaction because the flask integration does that --- .../routes/tasks_controller.py | 19 ++++--------------- .../services/service_task_service.py | 2 +- 2 files changed, 5 insertions(+), 16 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py index feb9218e..2879c120 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py @@ -425,21 +425,10 @@ def task_submit( terminate_loop: bool = False, ) -> flask.wrappers.Response: """Task_submit_user_data.""" - sentry_op = "controller_action" - sentry_transaction_name = "tasks_controller.task_submit" - transaction = sentry_sdk.Hub.current.scope.transaction - if transaction is None: - current_app.logger.info("transaction was None. pretty sure this never happens.") - with sentry_sdk.start_transaction(op=sentry_op, name=sentry_transaction_name): - return task_submit_shared( - process_instance_id, task_id, body, terminate_loop - ) - else: - current_app.logger.info("transaction existed.") - with transaction.start_child(op=sentry_op, description=sentry_transaction_name): - return task_submit_shared( - process_instance_id, task_id, body, terminate_loop - ) + with sentry_sdk.start_span( + op="controller_action", description="tasks_controller.task_submit" + ): + return task_submit_shared(process_instance_id, task_id, body, terminate_loop) def _get_tasks( diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/service_task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/service_task_service.py index c5401104..37af3956 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/service_task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/service_task_service.py @@ -47,7 +47,7 @@ class ServiceTaskDelegate: def call_connector(name: str, bpmn_params: Any, task_data: Any) -> str: """Calls a connector via the configured proxy.""" call_url = f"{connector_proxy_url()}/v1/do/{name}" - with sentry_sdk.start_transaction(op="call-connector", name=call_url): + with sentry_sdk.start_span(op="call-connector", description=call_url): params = { k: ServiceTaskDelegate.check_prefixes(v["value"]) for k, v in bpmn_params.items() From 34800463fd17a72b4061799af5ec9a7d820bb543 Mon Sep 17 00:00:00 2001 From: burnettk Date: Wed, 1 Feb 2023 17:06:34 -0500 Subject: [PATCH 26/40] bulk insert logs for performance improvement --- .../src/spiffworkflow_backend/services/logging_service.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py index 6a60944e..9981e1eb 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py +++ 
b/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py @@ -240,5 +240,8 @@ class DBHandler(logging.Handler): "spiff_step": spiff_step, } ) - if len(self.logs) % 1 == 0: + # so at some point we are going to insert logs. + # we don't want to insert on every log, so we will insert every 100 logs, which is just about as fast as inserting + # on every 1,000 logs. if we get deadlocks in the database, this can be changed to 1 in order to insert on every log. + if len(self.logs) % 100 == 0: self.bulk_insert_logs() From 6a0848f8950f537677983fe39031478e82de438d Mon Sep 17 00:00:00 2001 From: burnettk Date: Thu, 2 Feb 2023 09:54:19 -0500 Subject: [PATCH 27/40] add keycloak users --- .../realm_exports/spiffworkflow-realm.json | 208 ++++++++++++++++-- .../keycloak/test_user_lists/status | 8 + 2 files changed, 192 insertions(+), 24 deletions(-) diff --git a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json index a32acf00..634caef7 100644 --- a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json +++ b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json @@ -854,6 +854,46 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] + }, { + "id" : "672167fd-ae79-47a7-8429-f3bb1bd4ee55", + "createdTimestamp" : 1675349217829, + "username" : "infra1.sme", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "infra1.sme@status.im", + "credentials" : [ { + "id" : "bd5843bf-98cc-4891-ab03-693a5d69078b", + "type" : "password", + "createdDate" : 1675349217863, + "secretData" : "{\"value\":\"A78sm/+e2x/N/3A7Pk05eKhfANp+ZO9BQA3LYMwpzQ5KK2D/Ot8d1plOnqMT61rTnnCgxP8dtlA6/Ws61CMTYg==\",\"salt\":\"XOOknamJPwXD1LDj6LEodA==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] + }, { + "id" : "40891b68-121f-4fdb-86c0-0f52836d7e65", + "createdTimestamp" : 1675349217890, + "username" : "infra2.sme", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "infra2.sme@status.im", + "credentials" : [ { + "id" : "7e9927e2-ef7f-4247-b663-1f59147a9066", + "type" : "password", + "createdDate" : 1675349217926, + "secretData" : "{\"value\":\"j4M9u8p9FDCitGpb7JXM9JWFVGvBu7R2TOYG79c+Witl7gfWppues9fFzhlFyXgC78v6diHoQ4LwCwJGJS3loQ==\",\"salt\":\"H+i8qv6ulrBEZla/v8gDDw==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] }, { "id" : "1561518b-c327-491e-9db3-23c2b5394104", "createdTimestamp" : 1669303773974, @@ -1043,6 +1083,46 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] + }, { + "id" : "eff82d12-9a67-4002-b3c5-37811bd45199", + "createdTimestamp" : 1675349217585, + "username" : "legal.program-lead.sme", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "legal.program-lead.sme@status.im", + "credentials" : [ { + "id" : "933e3fc4-398a-46c3-bc4d-783ab29a0a5b", + "type" : "password", + "createdDate" : 1675349217655, + "secretData" : 
"{\"value\":\"x2M9khnGK+VCykoWbZKEcHNv5QMAcumqLa7+o+STJV8UYt7BobSBn7w1r3cbyYlvkgoWIglG8S2nLDFFb6hAQg==\",\"salt\":\"/lQYRrsUY1BxNUOZSKaZwA==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] + }, { + "id" : "8cd6feba-5ca6-4cfb-bc1a-a52c80595783", + "createdTimestamp" : 1675349217698, + "username" : "legal.project-lead.sme", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "legal.project-lead.sme@status.im", + "credentials" : [ { + "id" : "908f858c-d3cd-47a9-b611-a1d48f0247e5", + "type" : "password", + "createdDate" : 1675349217733, + "secretData" : "{\"value\":\"r53SXu0dp6FrSJAVLHYrfwSKPZY9OKHfHBuJDEE2DCbZiQRH77C4sZWfUwbu/6OOhTtiBEe7gz2DQpimIDY4RQ==\",\"salt\":\"+g/OXXJEMkQiahmjSylAkw==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] }, { "id" : "2a3176a0-8dd5-4223-a3e1-3cac4134e474", "createdTimestamp" : 1674148695030, @@ -1063,6 +1143,26 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] + }, { + "id" : "3d62ca4e-88bc-4302-89c1-8741c771147e", + "createdTimestamp" : 1675349217762, + "username" : "legal1.sme", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "legal1.sme@status.im", + "credentials" : [ { + "id" : "b774d46d-a3e8-417f-97c6-2d2102a54b0b", + "type" : "password", + "createdDate" : 1675349217799, + "secretData" : "{\"value\":\"PF21YsnIoYZLJFT/y1i2FV4OmaQj8dRsalZ9R2PK6t/jKze3ds4k+I7WVe4h2H0hMB9fo9cSQ7kt2ygxfEBheg==\",\"salt\":\"5sOkSXzRSgNz7lHfUbKzdQ==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] }, { "id" : "6f5bfa09-7494-4a2f-b871-cf327048cac7", "createdTimestamp" : 1665517010600, @@ -1225,6 +1325,46 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] + }, { + "id" : "ace0432f-1818-4210-8bcf-15533abfb3ce", + "createdTimestamp" : 1675349217958, + "username" : "security.program-lead.sme", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "security.program-lead.sme@status.im", + "credentials" : [ { + "id" : "602512dd-b24f-458c-9cef-7271bd8177bc", + "type" : "password", + "createdDate" : 1675349217993, + "secretData" : "{\"value\":\"vUb+t9ukHz3oHGUxaYUP34riZrshZU4c3iWpHB0OzI3y0ggCeT9xFEcmrwdkfilkKvCBJxLswlirWmgnmxZH0w==\",\"salt\":\"0hzZkDK4hPH5xgR1TpyG1Q==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] + }, { + "id" : "6272ac80-1d79-4e3c-a5c1-b31660560318", + "createdTimestamp" : 1675349218020, + "username" : "security.project-lead.sme", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : 
"security.project-lead.sme@status.im", + "credentials" : [ { + "id" : "eb7673bf-50f1-40af-927b-162f536f6187", + "type" : "password", + "createdDate" : 1675349218054, + "secretData" : "{\"value\":\"E1eLmC7hCcv7I5X30TfMvpZv3MtHH+rVhgLrZnBJSUvsrXmRkHWScJ/POHQLwUgCLJeU/lKDP/f0TdO2PvHiow==\",\"salt\":\"dWM5XJIR7m/eZ0YlHmuC3A==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] }, { "id" : "74374cda-1516-48e5-9ef2-1fd7bcee84d3", "createdTimestamp" : 1674148695088, @@ -1245,6 +1385,26 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] + }, { + "id" : "98faab0c-d2af-4794-8491-03dad5f30c63", + "createdTimestamp" : 1675349218087, + "username" : "security1.sme", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "security1.sme@status.im", + "credentials" : [ { + "id" : "37bd6b9b-015b-4790-8a4f-883c47035bc4", + "type" : "password", + "createdDate" : 1675349218122, + "secretData" : "{\"value\":\"BJP9K4qIdnaDnE3meM2GLWMFdSJryxcZovtKDlZNaQXfSUH3X1mOJfaLXQsuTWJzSMIow8XZ5+ye47ZNabLCaQ==\",\"salt\":\"BqD7jPpdB7PzU6QTN5dpMA==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] }, { "id" : "487d3a85-89dd-4839-957a-c3f6d70551f6", "createdTimestamp" : 1657115173081, @@ -2514,7 +2674,7 @@ "subType" : "authenticated", "subComponents" : { }, "config" : { - "allowed-protocol-mapper-types" : [ "oidc-usermodel-attribute-mapper", "saml-user-attribute-mapper", "oidc-usermodel-property-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-role-list-mapper", "saml-user-property-mapper", "oidc-address-mapper", "oidc-full-name-mapper" ] + "allowed-protocol-mapper-types" : [ "oidc-address-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-role-list-mapper", "oidc-usermodel-attribute-mapper", "saml-user-property-mapper", "saml-user-attribute-mapper", "oidc-usermodel-property-mapper", "oidc-full-name-mapper" ] } }, { "id" : "d68e938d-dde6-47d9-bdc8-8e8523eb08cd", @@ -2532,7 +2692,7 @@ "subType" : "anonymous", "subComponents" : { }, "config" : { - "allowed-protocol-mapper-types" : [ "oidc-full-name-mapper", "saml-role-list-mapper", "oidc-usermodel-property-mapper", "saml-user-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-user-property-mapper", "oidc-usermodel-attribute-mapper", "oidc-address-mapper" ] + "allowed-protocol-mapper-types" : [ "saml-role-list-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-attribute-mapper", "saml-user-attribute-mapper", "oidc-full-name-mapper", "saml-user-property-mapper", "oidc-address-mapper", "oidc-usermodel-property-mapper" ] } }, { "id" : "3854361d-3fe5-47fb-9417-a99592e3dc5c", @@ -2622,7 +2782,7 @@ "internationalizationEnabled" : false, "supportedLocales" : [ ], "authenticationFlows" : [ { - "id" : "a91920d9-792e-486f-9a02-49fe00857ce5", + "id" : "feafc299-fede-4880-9e23-eb81aca22808", "alias" : "Account verification options", "description" : "Method with which to verity the existing account", "providerId" : "basic-flow", @@ -2644,7 +2804,7 @@ "userSetupAllowed" : false } ] }, { - "id" : 
"6b8f504c-39fb-4608-9223-52deb5ae0dfe", + "id" : "ce7904d0-9182-49a2-aa71-a7b43e21f3ac", "alias" : "Authentication Options", "description" : "Authentication options.", "providerId" : "basic-flow", @@ -2673,7 +2833,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "ac4dd6f3-43b2-4212-90eb-4df7c9a6a0bc", + "id" : "d9c6909a-5cc1-4ddf-b297-dbfcf6e609a6", "alias" : "Browser - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -2695,7 +2855,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "726b4a58-cb78-4105-a34c-3e4404c74362", + "id" : "083a589e-a486-42b6-ae73-1ec983967ff5", "alias" : "Direct Grant - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -2717,7 +2877,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "be1b5f5d-b80f-46a6-804b-bce20e2de246", + "id" : "7f0248b0-2d51-4175-9fd2-52b606a39e26", "alias" : "First broker login - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -2739,7 +2899,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "ff5097d8-818a-4176-8512-caf9d81eb6db", + "id" : "44465f1f-c700-4ec0-a234-d95c994c9e25", "alias" : "Handle Existing Account", "description" : "Handle what to do if there is existing account with same email/username like authenticated identity provider", "providerId" : "basic-flow", @@ -2761,7 +2921,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "b9ecf989-e87b-45c0-a440-bce46b473dec", + "id" : "8cf09055-5b98-4fc8-b867-3dffacdec21b", "alias" : "Reset - Conditional OTP", "description" : "Flow to determine if the OTP should be reset or not. Set to REQUIRED to force.", "providerId" : "basic-flow", @@ -2783,7 +2943,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "4554310c-e125-4834-a84e-53bbec7a79d6", + "id" : "16b50b3e-4240-4f49-a85e-1bfd40def300", "alias" : "User creation or linking", "description" : "Flow for the existing/non-existing user alternatives", "providerId" : "basic-flow", @@ -2806,7 +2966,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "204549aa-c931-45a2-b2f0-1a5a0c724935", + "id" : "2aa981ae-d67e-49fb-95a4-91de1e5ab724", "alias" : "Verify Existing Account by Re-authentication", "description" : "Reauthentication of existing account", "providerId" : "basic-flow", @@ -2828,7 +2988,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "d02f58b1-6469-46ea-a348-d923b5aa9727", + "id" : "cf8406f7-09c3-4614-a898-99c9d66746f6", "alias" : "browser", "description" : "browser based authentication", "providerId" : "basic-flow", @@ -2864,7 +3024,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "7ef6a658-be09-4b81-91ac-f21dc80b0841", + "id" : "e1ec7d6e-7612-4c5b-afce-c7f4fddbf6ec", "alias" : "clients", "description" : "Base authentication for clients", "providerId" : "client-flow", @@ -2900,7 +3060,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "f7f2eeab-6455-4a18-a98d-b1a5f04e35fb", + "id" : "f5862b09-6e01-4c88-b44e-26dc59d71b80", "alias" : "direct grant", "description" : "OpenID Connect Resource Owner Grant", "providerId" : "basic-flow", @@ -2929,7 +3089,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "c44389c2-08b2-4adb-a6e9-e41006cb20c7", + "id" : "7caa8611-8b13-437e-83b2-556899b5444f", "alias" : "docker auth", "description" : "Used by Docker clients to authenticate against the IDP", "providerId" : "basic-flow", @@ -2944,7 +3104,7 @@ "userSetupAllowed" : false } ] }, { - "id" : 
"edf00de8-8f19-4a32-98c4-15e719c1fadd", + "id" : "91d40deb-344f-4e0b-a845-98b2fc4a633a", "alias" : "first broker login", "description" : "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account", "providerId" : "basic-flow", @@ -2967,7 +3127,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "58415605-eb47-41b3-a07f-90bbbbcb9963", + "id" : "f221b5e6-1bcc-4b37-ba61-4d3bc6a30a8b", "alias" : "forms", "description" : "Username, password, otp and other auth forms.", "providerId" : "basic-flow", @@ -2989,7 +3149,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "1eae6099-3e1e-484b-ad94-b09339affb68", + "id" : "3ed8e597-19af-4ec8-b532-a97311f52de3", "alias" : "http challenge", "description" : "An authentication flow based on challenge-response HTTP Authentication Schemes", "providerId" : "basic-flow", @@ -3011,7 +3171,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "8af03739-b77a-4582-ab63-a1855ca4f637", + "id" : "3970fd16-3786-4eb3-9efe-453d0984b18b", "alias" : "registration", "description" : "registration flow", "providerId" : "basic-flow", @@ -3027,7 +3187,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "0c308998-c5ad-4cf8-ab5c-15be89cbe4d7", + "id" : "e26b27b4-c957-491c-bb6d-9d226b22399c", "alias" : "registration form", "description" : "registration form", "providerId" : "form-flow", @@ -3063,7 +3223,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "5510aa65-e78d-4d08-a3ca-31e277bc3cd0", + "id" : "3ae37429-a623-42e3-a4a1-f9586b96b730", "alias" : "reset credentials", "description" : "Reset credentials for a user if they forgot their password or something", "providerId" : "basic-flow", @@ -3099,7 +3259,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "b6b3e35d-8df3-487e-b2d2-9fdf524a4181", + "id" : "7606ecd5-eb13-4aee-bd9f-3ec4ce77c59c", "alias" : "saml ecp", "description" : "SAML ECP Profile Authentication Flow", "providerId" : "basic-flow", @@ -3115,13 +3275,13 @@ } ] } ], "authenticatorConfig" : [ { - "id" : "a2e9294b-74ce-4ea6-8372-9d9fb3d60a06", + "id" : "058b3c89-4ea4-43fa-b337-e523b1d93ec3", "alias" : "create unique user config", "config" : { "require.password.update.after.registration" : "false" } }, { - "id" : "de65a90c-cc4b-4bf0-8e84-756e23a504f0", + "id" : "21410ac7-4b82-4f19-aae2-43ac33ba3f8f", "alias" : "review profile config", "config" : { "update.profile.on.first.login" : "missing" diff --git a/spiffworkflow-backend/keycloak/test_user_lists/status b/spiffworkflow-backend/keycloak/test_user_lists/status index 651e76da..667c4f03 100644 --- a/spiffworkflow-backend/keycloak/test_user_lists/status +++ b/spiffworkflow-backend/keycloak/test_user_lists/status @@ -15,3 +15,11 @@ dao.project.lead@status.im desktop.project.lead@status.im app.program.lead@status.im desktop.program.lead@status.im +legal.program-lead.sme@status.im +legal.project-lead.sme@status.im +legal1.sme@status.im +infra1.sme@status.im +infra2.sme@status.im +security.program-lead.sme@status.im +security.project-lead.sme@status.im +security1.sme@status.im From a8158637272a306a34256d664c7c3ff538421844 Mon Sep 17 00:00:00 2001 From: jbirddog <100367399+jbirddog@users.noreply.github.com> Date: Thu, 2 Feb 2023 10:24:55 -0500 Subject: [PATCH 28/40] Allow for different Python Environments when executing scripts within SpiffWorkflow (#121) --- spiffworkflow-backend/poetry.lock | 5 +- .../services/process_instance_processor.py | 184 ++++++++++++++++-- .../services/script_unit_test_runner.py | 2 + .../scripts/test_get_localtime.py | 3 +- 4 files changed, 168 
insertions(+), 26 deletions(-) diff --git a/spiffworkflow-backend/poetry.lock b/spiffworkflow-backend/poetry.lock index 312890e5..733c84ac 100644 --- a/spiffworkflow-backend/poetry.lock +++ b/spiffworkflow-backend/poetry.lock @@ -1825,7 +1825,7 @@ lxml = "*" type = "git" url = "https://github.com/sartography/SpiffWorkflow" reference = "main" -resolved_reference = "98c6294f1240aee599cd98bcee58d121cb57b331" +resolved_reference = "64737498caa36c25b12f5216bdc9c30338b2a1fa" [[package]] name = "SQLAlchemy" @@ -2863,10 +2863,7 @@ orjson = [ {file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b68a42a31f8429728183c21fb440c21de1b62e5378d0d73f280e2d894ef8942e"}, {file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ff13410ddbdda5d4197a4a4c09969cb78c722a67550f0a63c02c07aadc624833"}, {file = "orjson-3.8.0-cp310-none-win_amd64.whl", hash = "sha256:2d81e6e56bbea44be0222fb53f7b255b4e7426290516771592738ca01dbd053b"}, - {file = "orjson-3.8.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:200eae21c33f1f8b02a11f5d88d76950cd6fd986d88f1afe497a8ae2627c49aa"}, - {file = "orjson-3.8.0-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:9529990f3eab54b976d327360aa1ff244a4b12cb5e4c5b3712fcdd96e8fe56d4"}, {file = "orjson-3.8.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e2defd9527651ad39ec20ae03c812adf47ef7662bdd6bc07dabb10888d70dc62"}, - {file = "orjson-3.8.0-cp311-none-win_amd64.whl", hash = "sha256:b21c7af0ff6228ca7105f54f0800636eb49201133e15ddb80ac20c1ce973ef07"}, {file = "orjson-3.8.0-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:9e6ac22cec72d5b39035b566e4b86c74b84866f12b5b0b6541506a080fb67d6d"}, {file = "orjson-3.8.0-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:e2f4a5542f50e3d336a18cb224fc757245ca66b1fd0b70b5dd4471b8ff5f2b0e"}, {file = "orjson-3.8.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1418feeb8b698b9224b1f024555895169d481604d5d884498c1838d7412794c"}, diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 7cec48a1..40458838 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -26,8 +26,10 @@ from lxml import etree # type: ignore from lxml.etree import XMLSyntaxError # type: ignore from RestrictedPython import safe_globals # type: ignore from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException # type: ignore -from SpiffWorkflow.bpmn.PythonScriptEngine import Box # type: ignore -from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine +from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine # type: ignore +from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import BasePythonScriptEngineEnvironment # type: ignore +from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import Box +from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import BoxedTaskDataEnvironment from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer # type: ignore from SpiffWorkflow.bpmn.specs.BpmnProcessSpec import BpmnProcessSpec # type: ignore from SpiffWorkflow.bpmn.specs.events.EndEvent import EndEvent # type: ignore @@ -150,6 +152,132 @@ class ProcessInstanceLockedBySomethingElseError(Exception): pass 
+class BoxedTaskDataBasedScriptEngineEnvironment(BoxedTaskDataEnvironment):  # type: ignore
+    def __init__(self, environment_globals: Dict[str, Any]):
+        """BoxedTaskDataBasedScriptEngineEnvironment."""
+        self._last_result: Dict[str, Any] = {}
+        super().__init__(environment_globals)
+
+    def execute(
+        self,
+        script: str,
+        context: Dict[str, Any],
+        external_methods: Optional[Dict[str, Any]] = None,
+    ) -> None:
+        super().execute(script, context, external_methods)
+        self._last_result = context
+
+    def last_result(self) -> Dict[str, Any]:
+        return self._last_result
+
+    def clear_state(self) -> None:
+        pass
+
+    def preserve_state(self, bpmn_process_instance: BpmnWorkflow) -> None:
+        pass
+
+    def restore_state(self, bpmn_process_instance: BpmnWorkflow) -> None:
+        pass
+
+    def finalize_result(self, bpmn_process_instance: BpmnWorkflow) -> None:
+        pass
+
+    def revise_state_with_task_data(self, task: SpiffTask) -> None:
+        pass
+
+
+class NonTaskDataBasedScriptEngineEnvironment(BasePythonScriptEngineEnvironment):  # type: ignore
+    PYTHON_ENVIRONMENT_STATE_KEY = "spiff__python_env_state"
+
+    def __init__(self, environment_globals: Dict[str, Any]):
+        """NonTaskDataBasedScriptEngineEnvironment."""
+        self.state: Dict[str, Any] = {}
+        self.non_user_defined_keys = set(
+            [*environment_globals.keys()] + ["__builtins__", "current_user"]
+        )
+        super().__init__(environment_globals)
+
+    def evaluate(
+        self,
+        expression: str,
+        context: Dict[str, Any],
+        external_methods: Optional[dict[str, Any]] = None,
+    ) -> Any:
+        # TODO: once integrated look at the tests that fail without Box
+        Box.convert_to_box(context)
+        state = {}
+        state.update(self.globals)
+        state.update(external_methods or {})
+        state.update(self.state)
+        state.update(context)
+        return eval(expression, state)  # noqa
+
+    def execute(
+        self,
+        script: str,
+        context: Dict[str, Any],
+        external_methods: Optional[Dict[str, Any]] = None,
+    ) -> None:
+        # TODO: once integrated look at the tests that fail without Box
+        Box.convert_to_box(context)
+        self.state.update(self.globals)
+        self.state.update(external_methods or {})
+        self.state.update(context)
+        exec(script, self.state)  # noqa
+
+        self.state = self._user_defined_state(external_methods)
+
+        # the task data needs to be updated with the current state so data references can be resolved properly.
+        # the state will be removed later once the task is completed.
+        context.update(self.state)
+
+    def _user_defined_state(
+        self, external_methods: Optional[Dict[str, Any]] = None
+    ) -> Dict[str, Any]:
+        keys_to_filter = self.non_user_defined_keys
+        if external_methods is not None:
+            keys_to_filter |= set(external_methods.keys())
+
+        return {
+            k: v
+            for k, v in self.state.items()
+            if k not in keys_to_filter and not callable(v)
+        }
+
+    def last_result(self) -> Dict[str, Any]:
+        return self.state
+
+    def clear_state(self) -> None:
+        self.state = {}
+
+    def preserve_state(self, bpmn_process_instance: BpmnWorkflow) -> None:
+        key = self.PYTHON_ENVIRONMENT_STATE_KEY
+        state = self._user_defined_state()
+        bpmn_process_instance.data[key] = state
+
+    def restore_state(self, bpmn_process_instance: BpmnWorkflow) -> None:
+        key = self.PYTHON_ENVIRONMENT_STATE_KEY
+        self.state = bpmn_process_instance.data.get(key, {})
+
+    def finalize_result(self, bpmn_process_instance: BpmnWorkflow) -> None:
+        bpmn_process_instance.data.update(self._user_defined_state())
+
+    def revise_state_with_task_data(self, task: SpiffTask) -> None:
+        state_keys = set(self.state.keys())
+        task_data_keys = set(task.data.keys())
+        state_keys_to_remove = state_keys - task_data_keys
+        task_data_keys_to_keep = task_data_keys - state_keys
+
+        self.state = {
+            k: v for k, v in self.state.items() if k not in state_keys_to_remove
+        }
+        task.data = {k: v for k, v in task.data.items() if k in task_data_keys_to_keep}
+
+
+class CustomScriptEngineEnvironment(BoxedTaskDataBasedScriptEngineEnvironment):
+    pass
+
+
 class CustomBpmnScriptEngine(PythonScriptEngine):  # type: ignore
     """This is a custom script processor that can be easily injected into Spiff Workflow.
 
@@ -179,7 +307,9 @@ class CustomBpmnScriptEngine(PythonScriptEngine):  # type: ignore
         default_globals.update(safe_globals)
         default_globals["__builtins__"]["__import__"] = _import
 
-        super().__init__(default_globals=default_globals)
+        environment = CustomScriptEngineEnvironment(default_globals)
+
+        super().__init__(environment=environment)
 
     def __get_augment_methods(self, task: SpiffTask) -> Dict[str, Callable]:
         """__get_augment_methods."""
@@ -392,7 +522,7 @@
                 validate_only,
                 subprocesses=subprocesses,
             )
-            self.bpmn_process_instance.script_engine = self._script_engine
+            self.set_script_engine(self.bpmn_process_instance)
 
             self.add_user_info_to_process_instance(self.bpmn_process_instance)
         except MissingSpecError as ke:
@@ -438,6 +568,18 @@
             bpmn_process_spec, subprocesses
         )
 
+    @staticmethod
+    def set_script_engine(bpmn_process_instance: BpmnWorkflow) -> None:
+        ProcessInstanceProcessor._script_engine.environment.restore_state(
+            bpmn_process_instance
+        )
+        bpmn_process_instance.script_engine = ProcessInstanceProcessor._script_engine
+
+    def preserve_script_engine_state(self) -> None:
+        ProcessInstanceProcessor._script_engine.environment.preserve_state(
+            self.bpmn_process_instance
+        )
+
     def current_user(self) -> Any:
         """Current_user."""
         current_user = None
@@ -470,11 +612,12 @@
         subprocesses: Optional[IdToBpmnProcessSpecMapping] = None,
     ) -> BpmnWorkflow:
         """Get_bpmn_process_instance_from_workflow_spec."""
-        return BpmnWorkflow(
+        bpmn_process_instance = BpmnWorkflow(
             spec,
-            script_engine=ProcessInstanceProcessor._script_engine,
             subprocess_specs=subprocesses,
         )
+        ProcessInstanceProcessor.set_script_engine(bpmn_process_instance)
+        return bpmn_process_instance
 
     @staticmethod
     def __get_bpmn_process_instance(
@@ -501,9 +644,7 @@
             finally:
                 spiff_logger.setLevel(original_spiff_logger_log_level)
 
-            bpmn_process_instance.script_engine = (
-                ProcessInstanceProcessor._script_engine
-            )
+            ProcessInstanceProcessor.set_script_engine(bpmn_process_instance)
         else:
             bpmn_process_instance = (
                 ProcessInstanceProcessor.get_bpmn_process_instance_from_workflow_spec(
@@ -1384,25 +1525,25 @@ ProcessInstanceProcessor:
     def do_engine_steps(self, exit_at: None = None, save: bool = False) -> None:
         """Do_engine_steps."""
         step_details = []
+
+        def did_complete_task(task: SpiffTask) -> None:
+            self._script_engine.environment.revise_state_with_task_data(task)
+            step_details.append(self.spiff_step_details_mapping())
+
         try:
-            self.bpmn_process_instance.refresh_waiting_tasks(
-                #
-                # commenting out to see if this helps with the growing spiff steps/db issue
-                #
-                # will_refresh_task=lambda t: self.increment_spiff_step(),
-                # did_refresh_task=lambda t: step_details.append(
-                #     self.spiff_step_details_mapping()
-                # ),
-            )
+            self.bpmn_process_instance.refresh_waiting_tasks()
 
             self.bpmn_process_instance.do_engine_steps(
                 exit_at=exit_at,
                 will_complete_task=lambda t: self.increment_spiff_step(),
-                did_complete_task=lambda t: step_details.append(
-                    self.spiff_step_details_mapping()
-                ),
+                did_complete_task=did_complete_task,
             )
 
+            if self.bpmn_process_instance.is_completed():
+                self._script_engine.environment.finalize_result(
+                    self.bpmn_process_instance
+                )
+
             self.process_bpmn_messages()
             self.queue_waiting_receive_messages()
 
@@ -1466,6 +1607,7 @@
     def serialize(self) -> str:
         """Serialize."""
         self.check_task_data_size()
+        self.preserve_script_engine_state()
         return self._serializer.serialize_json(self.bpmn_process_instance)  # type: ignore
 
     def next_user_tasks(self) -> list[SpiffTask]:
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/script_unit_test_runner.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/script_unit_test_runner.py
index 1fafb548..310f53e9 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/services/script_unit_test_runner.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/script_unit_test_runner.py
@@ -45,6 +45,7 @@ class ScriptUnitTestRunner:
         context = input_context.copy()
 
         try:
+            cls._script_engine.environment.clear_state()
             cls._script_engine._execute(context=context, script=script)
         except SyntaxError as ex:
             return ScriptUnitTestResult(
@@ -77,6 +78,7 @@
                 error=f"Failed to execute script: {error_message}",
             )
 
+        context = cls._script_engine.environment.last_result()
         result_as_boolean = context == expected_output_context
 
         script_unit_test_result = ScriptUnitTestResult(
diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_localtime.py b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_localtime.py
index 90e4158d..8116ec42 100644
--- a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_localtime.py
+++ b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_localtime.py
@@ -87,7 +87,8 @@
         )
 
         assert spiff_task
-        data = spiff_task.data
+
+        data = ProcessInstanceProcessor._script_engine.environment.last_result()
         some_time = data["some_time"]
         localtime = data["localtime"]
         timezone = data["timezone"]

From 4240946334ca95d69bfb87f5f2831ca810884e10 Mon Sep 17 00:00:00 2001
From: jbirddog <100367399+jbirddog@users.noreply.github.com>
Date: Thu, 2 Feb 2023 14:44:37 -0500
Subject: [PATCH 29/40] File download from workflow data (#122)
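
File values in process data are stored as data URLs of the form
"data:<mimetype>;name=<filename>;base64,<contents>", and the new endpoint
decodes such a value and serves it as an attachment. A minimal sketch of the
decoding step (an illustrative standalone helper, not code added by this
patch; the endpoint below inlines the same logic):

    import base64

    def parse_data_url(value: str) -> tuple[str, str, bytes]:
        # "data:application/pdf;name=a.pdf;base64,JVBERi0..." ->
        # ("application/pdf", "name=a.pdf", b"%PDF-...")
        parts = value.split(";")
        mimetype = parts[0][len("data:"):]   # strip the "data:" prefix
        filename_part = parts[1]             # kept verbatim, e.g. "name=a.pdf"
        contents = base64.b64decode(parts[2].split(",")[1])
        return mimetype, filename_part, contents
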
---
 .../src/spiffworkflow_backend/api.yml       | 39 +++++++++++++
 .../routes/process_api_blueprint.py         | 57 ++++++++++++++++++-
 .../src/spiffworkflow_backend/routes/user.py |  5 ++
 .../scripts/markdown_file_download_link.py  | 51 +++++++++++++++++
 4 files changed, 151 insertions(+), 1 deletion(-)
 create mode 100644 spiffworkflow-backend/src/spiffworkflow_backend/scripts/markdown_file_download_link.py

diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml
index 825a24b4..326d55b6 100755
--- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml
@@ -1605,6 +1605,45 @@ paths:
           schema:
             $ref: "#/components/schemas/Workflow"
 
+  /process-data-file-download/{modified_process_model_identifier}/{process_instance_id}/{process_data_identifier}:
+    parameters:
+      - name: modified_process_model_identifier
+        in: path
+        required: true
+        description: The modified id of an existing process model
+        schema:
+          type: string
+      - name: process_instance_id
+        in: path
+        required: true
+        description: The unique id of an existing process instance.
+        schema:
+          type: integer
+      - name: process_data_identifier
+        in: path
+        required: true
+        description: The identifier of the process data.
+        schema:
+          type: string
+      - name: index
+        in: query
+        required: false
+        description: The optional index of the value if the key's value is an array
+        schema:
+          type: integer
+    get:
+      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_data_file_download
+      summary: Download the file referenced in the process data value.
+      tags:
+        - Data Objects
+      responses:
+        "200":
+          description: Fetch succeeded.
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Workflow"
+
   /send-event/{modified_process_model_identifier}/{process_instance_id}:
     parameters:
       - name: modified_process_model_identifier
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py
index 0e9bd581..82263475 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py
@@ -1,7 +1,9 @@
 """APIs for dealing with process groups, process models, and process instances."""
+import base64
 import json
 from typing import Any
 from typing import Dict
+from typing import Optional
 
 import flask.wrappers
 from flask import Blueprint
@@ -81,10 +83,12 @@ def process_list() -> Any:
     return SpecReferenceSchema(many=True).dump(references)
 
 
-def process_data_show(
+def _process_data_fetcher(
     process_instance_id: int,
     process_data_identifier: str,
     modified_process_model_identifier: str,
+    download_file_data: bool,
+    index: Optional[int] = None,
 ) -> flask.wrappers.Response:
     """Process_data_show."""
     process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
@@ -94,6 +98,26 @@ def process_data_show(
     if process_data_identifier in all_process_data:
         process_data_value = all_process_data[process_data_identifier]
 
+    if process_data_value is not None and index is not None:
+        process_data_value = process_data_value[index]
+
+    if (
+        download_file_data
+        and isinstance(process_data_value, str)
+        and process_data_value.startswith("data:")
+    ):
+        parts = process_data_value.split(";")
+        mimetype = parts[0][4:]
+        filename = parts[1]
+        base64_value = parts[2].split(",")[1]
+        file_contents = base64.b64decode(base64_value)
+
+        return Response(
+            file_contents,
+            mimetype=mimetype,
+            headers={"Content-disposition": f"attachment; filename={filename}"},
+        )
+
     return make_response(
         jsonify(
             {
@@ -105,6 +129,37 @@
     )
 
 
+def process_data_show(
+    process_instance_id: int,
+    process_data_identifier: str,
+    modified_process_model_identifier: str,
+) -> flask.wrappers.Response:
+    """Process_data_show."""
+    return _process_data_fetcher(
+        process_instance_id,
+        process_data_identifier,
+        modified_process_model_identifier,
+        False,
+        None,
+    )
+
+
+def process_data_file_download(
+    process_instance_id: int,
+    process_data_identifier: str,
+    modified_process_model_identifier: str,
+    index: Optional[int] = None,
+) -> flask.wrappers.Response:
+    """Process_data_file_download."""
+    return _process_data_fetcher(
+        process_instance_id,
+        process_data_identifier,
+        modified_process_model_identifier,
+        True,
+        index,
+    )
+
+
 # sample body:
 # {"ref": "refs/heads/main", "repository": {"name": "sample-process-models",
 # "full_name": "sartography/sample-process-models", "private": False .... }}
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py
index 6873198a..6fd7d39c 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py
@@ -17,6 +17,7 @@
 from flask import request
 from werkzeug.wrappers import Response
 
 from spiffworkflow_backend.exceptions.api_error import ApiError
+from spiffworkflow_backend.helpers.api_version import V1_API_PATH_PREFIX
 from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.services.authentication_service import AuthenticationService
 from spiffworkflow_backend.services.authentication_service import (
@@ -58,6 +59,10 @@ def verify_token(
     if not token and "Authorization" in request.headers:
         token = request.headers["Authorization"].removeprefix("Bearer ")
 
+    if not token and "access_token" in request.cookies:
+        if request.path.startswith(f"{V1_API_PATH_PREFIX}/process-data-file-download/"):
+            token = request.cookies["access_token"]
+
     # This should never be set here but just in case
     _clear_auth_tokens_from_thread_local_data()
 
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/markdown_file_download_link.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/markdown_file_download_link.py
new file mode 100644
index 00000000..3952525b
--- /dev/null
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/markdown_file_download_link.py
@@ -0,0 +1,51 @@
+"""Markdown_file_download_link."""
+from typing import Any
+from urllib.parse import unquote
+
+from flask import current_app
+
+from spiffworkflow_backend.models.process_model import ProcessModelInfo
+from spiffworkflow_backend.models.script_attributes_context import (
+    ScriptAttributesContext,
+)
+from spiffworkflow_backend.scripts.script import Script
+
+
+class GetMarkdownFileDownloadLink(Script):
+    """GetMarkdownFileDownloadLink."""
+
+    @staticmethod
+    def requires_privileged_permissions() -> bool:
+        """We have deemed this function safe to run without elevated permissions."""
+        return False
+
+    def get_description(self) -> str:
+        """Get_description."""
+        return """Returns a download link in markdown format."""
+
+    def run(
+        self,
+        script_attributes_context: ScriptAttributesContext,
+        *_args: Any,
+        **kwargs: Any,
+    ) -> Any:
+        """Run."""
+        # example input:
+        #     "data:application/pdf;name=Harmeet_1234.pdf;base64,JV...."
+        process_data_identifier = kwargs["key"]
+        parts = kwargs["file_data"].split(";")
+        file_index = kwargs["file_index"]
+        label = unquote(parts[1].split("=")[1])
+        process_model_identifier = script_attributes_context.process_model_identifier
+        modified_process_model_identifier = (
+            ProcessModelInfo.modify_process_identifier_for_path_param(
+                process_model_identifier
+            )
+        )
+        process_instance_id = script_attributes_context.process_instance_id
+        url = current_app.config["SPIFFWORKFLOW_BACKEND_URL"]
+        url += f"/v1.0/process-data-file-download/{modified_process_model_identifier}/"
+        f"{process_instance_id}/{process_data_identifier}?index={file_index}"
+        link = f"[{label}]({url})"
+
+        return link

From 643fef7c2079ece2eb30fba8a459497ca357e97c Mon Sep 17 00:00:00 2001
From: Jon Herron
Date: Thu, 2 Feb 2023 15:04:57 -0500
Subject: [PATCH 30/40] Quick fix for url building

---
 .../scripts/markdown_file_download_link.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/markdown_file_download_link.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/markdown_file_download_link.py
index 3952525b..d1b3af7f 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/markdown_file_download_link.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/markdown_file_download_link.py
@@ -44,8 +44,8 @@ class GetMarkdownFileDownloadLink(Script):
         )
         process_instance_id = script_attributes_context.process_instance_id
         url = current_app.config["SPIFFWORKFLOW_BACKEND_URL"]
-        url += f"/v1.0/process-data-file-download/{modified_process_model_identifier}/"
-        f"{process_instance_id}/{process_data_identifier}?index={file_index}"
+        url += f"/v1.0/process-data-file-download/{modified_process_model_identifier}/" + \
+            f"{process_instance_id}/{process_data_identifier}?index={file_index}"
         link = f"[{label}]({url})"
 
         return link

From 4934014ba90ebf1f72602c30a9ab5fbe6251cbfc Mon Sep 17 00:00:00 2001
From: burnettk
Date: Thu, 2 Feb 2023 15:40:01 -0500
Subject: [PATCH 31/40] simplify spiff integration post serializer update, w/
 elizabeth and jon

---
 spiffworkflow-backend/poetry.lock          |  5 +-
 .../scripts/markdown_file_download_link.py |  6 ++-
 .../services/process_instance_processor.py | 51 +++----------------
 3 files changed, 15 insertions(+), 47 deletions(-)

diff --git a/spiffworkflow-backend/poetry.lock b/spiffworkflow-backend/poetry.lock
index 733c84ac..570faf85 100644
--- a/spiffworkflow-backend/poetry.lock
+++ b/spiffworkflow-backend/poetry.lock
@@ -1825,7 +1825,7 @@ lxml = "*"
 type = "git"
 url = "https://github.com/sartography/SpiffWorkflow"
 reference = "main"
-resolved_reference = "64737498caa36c25b12f5216bdc9c30338b2a1fa"
+resolved_reference = "0e61be85c47474a33037e6f398e64c96e02f13ad"
 
 [[package]]
 name = "SQLAlchemy"
@@ -2546,6 +2546,7 @@ greenlet = [
     {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5b0ff9878333823226d270417f24f4d06f235cb3e54d1103b71ea537a6a86ce"},
     {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be9e0fb2ada7e5124f5282d6381903183ecc73ea019568d6d63d33f25b2a9000"},
     {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b493db84d124805865adc587532ebad30efa68f79ad68f11b336e0a51ec86c2"},
+    {file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash =
"sha256:0459d94f73265744fee4c2d5ec44c6f34aa8a31017e6e9de770f7bcf29710be9"}, {file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a20d33124935d27b80e6fdacbd34205732660e0a1d35d8b10b3328179a2b51a1"}, {file = "greenlet-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:ea688d11707d30e212e0110a1aac7f7f3f542a259235d396f88be68b649e47d1"}, {file = "greenlet-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:afe07421c969e259e9403c3bb658968702bc3b78ec0b6fde3ae1e73440529c23"}, @@ -2554,6 +2555,7 @@ greenlet = [ {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:659f167f419a4609bc0516fb18ea69ed39dbb25594934bd2dd4d0401660e8a1e"}, {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:356e4519d4dfa766d50ecc498544b44c0249b6de66426041d7f8b751de4d6b48"}, {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:811e1d37d60b47cb8126e0a929b58c046251f28117cb16fcd371eed61f66b764"}, + {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d38ffd0e81ba8ef347d2be0772e899c289b59ff150ebbbbe05dc61b1246eb4e0"}, {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0109af1138afbfb8ae647e31a2b1ab030f58b21dd8528c27beaeb0093b7938a9"}, {file = "greenlet-2.0.1-cp38-cp38-win32.whl", hash = "sha256:88c8d517e78acdf7df8a2134a3c4b964415b575d2840a2746ddb1cc6175f8608"}, {file = "greenlet-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:d6ee1aa7ab36475035eb48c01efae87d37936a8173fc4d7b10bb02c2d75dd8f6"}, @@ -2562,6 +2564,7 @@ greenlet = [ {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:505138d4fa69462447a562a7c2ef723c6025ba12ac04478bc1ce2fcc279a2db5"}, {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cce1e90dd302f45716a7715517c6aa0468af0bf38e814ad4eab58e88fc09f7f7"}, {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e9744c657d896c7b580455e739899e492a4a452e2dd4d2b3e459f6b244a638d"}, + {file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:662e8f7cad915ba75d8017b3e601afc01ef20deeeabf281bd00369de196d7726"}, {file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:41b825d65f31e394b523c84db84f9383a2f7eefc13d987f308f4663794d2687e"}, {file = "greenlet-2.0.1-cp39-cp39-win32.whl", hash = "sha256:db38f80540083ea33bdab614a9d28bcec4b54daa5aff1668d7827a9fc769ae0a"}, {file = "greenlet-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b23d2a46d53210b498e5b701a1913697671988f4bf8e10f935433f6e7c332fb6"}, diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/markdown_file_download_link.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/markdown_file_download_link.py index d1b3af7f..25f81cc7 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/markdown_file_download_link.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/markdown_file_download_link.py @@ -44,8 +44,10 @@ class GetMarkdownFileDownloadLink(Script): ) process_instance_id = script_attributes_context.process_instance_id url = current_app.config["SPIFFWORKFLOW_BACKEND_URL"] - url += f"/v1.0/process-data-file-download/{modified_process_model_identifier}/" + \ - f"{process_instance_id}/{process_data_identifier}?index={file_index}" + url += ( + f"/v1.0/process-data-file-download/{modified_process_model_identifier}/" + + 
f"{process_instance_id}/{process_data_identifier}?index={file_index}" + ) link = f"[{label}]({url})" return link diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 40458838..b45add69 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -38,36 +38,14 @@ from SpiffWorkflow.bpmn.specs.events.StartEvent import StartEvent # type: ignor from SpiffWorkflow.bpmn.specs.SubWorkflowTask import SubWorkflowTask # type: ignore from SpiffWorkflow.bpmn.workflow import BpmnWorkflow # type: ignore from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser # type: ignore -from SpiffWorkflow.dmn.serializer.task_spec_converters import BusinessRuleTaskConverter # type: ignore +from SpiffWorkflow.dmn.serializer.task_spec import BusinessRuleTaskConverter # type: ignore from SpiffWorkflow.exceptions import WorkflowException # type: ignore from SpiffWorkflow.exceptions import WorkflowTaskException from SpiffWorkflow.serializer.exceptions import MissingSpecError # type: ignore -from SpiffWorkflow.spiff.serializer.task_spec_converters import BoundaryEventConverter # type: ignore -from SpiffWorkflow.spiff.serializer.task_spec_converters import ( - CallActivityTaskConverter, -) -from SpiffWorkflow.spiff.serializer.task_spec_converters import EndEventConverter -from SpiffWorkflow.spiff.serializer.task_spec_converters import ( +from SpiffWorkflow.spiff.serializer.config import SPIFF_SPEC_CONFIG # type: ignore +from SpiffWorkflow.spiff.serializer.task_spec_converters import ( # type: ignore EventBasedGatewayConverter, ) -from SpiffWorkflow.spiff.serializer.task_spec_converters import ( - IntermediateCatchEventConverter, -) -from SpiffWorkflow.spiff.serializer.task_spec_converters import ( - IntermediateThrowEventConverter, -) -from SpiffWorkflow.spiff.serializer.task_spec_converters import ManualTaskConverter -from SpiffWorkflow.spiff.serializer.task_spec_converters import NoneTaskConverter -from SpiffWorkflow.spiff.serializer.task_spec_converters import ReceiveTaskConverter -from SpiffWorkflow.spiff.serializer.task_spec_converters import ScriptTaskConverter -from SpiffWorkflow.spiff.serializer.task_spec_converters import SendTaskConverter -from SpiffWorkflow.spiff.serializer.task_spec_converters import ServiceTaskConverter -from SpiffWorkflow.spiff.serializer.task_spec_converters import StartEventConverter -from SpiffWorkflow.spiff.serializer.task_spec_converters import SubWorkflowTaskConverter -from SpiffWorkflow.spiff.serializer.task_spec_converters import ( - TransactionSubprocessConverter, -) -from SpiffWorkflow.spiff.serializer.task_spec_converters import UserTaskConverter from SpiffWorkflow.task import Task as SpiffTask # type: ignore from SpiffWorkflow.task import TaskState from SpiffWorkflow.util.deep_merge import DeepMerge # type: ignore @@ -110,6 +88,8 @@ from spiffworkflow_backend.services.service_task_service import ServiceTaskDeleg from spiffworkflow_backend.services.spec_file_service import SpecFileService from spiffworkflow_backend.services.user_service import UserService +SPIFF_SPEC_CONFIG["task_specs"].append(BusinessRuleTaskConverter) + # Sorry about all this crap. I wanted to move this thing to another file, but # importing a bunch of types causes circular imports. 
@@ -408,26 +388,9 @@ class ProcessInstanceProcessor: _script_engine = CustomBpmnScriptEngine() SERIALIZER_VERSION = "1.0-spiffworkflow-backend" + wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter( - [ - BoundaryEventConverter, - BusinessRuleTaskConverter, - CallActivityTaskConverter, - EndEventConverter, - IntermediateCatchEventConverter, - IntermediateThrowEventConverter, - EventBasedGatewayConverter, - ManualTaskConverter, - NoneTaskConverter, - ReceiveTaskConverter, - ScriptTaskConverter, - SendTaskConverter, - ServiceTaskConverter, - StartEventConverter, - SubWorkflowTaskConverter, - TransactionSubprocessConverter, - UserTaskConverter, - ] + SPIFF_SPEC_CONFIG ) _serializer = BpmnWorkflowSerializer(wf_spec_converter, version=SERIALIZER_VERSION) _event_serializer = EventBasedGatewayConverter() From 3d5647d1664c3bb44378fce8c066af87953d5582 Mon Sep 17 00:00:00 2001 From: burnettk Date: Thu, 2 Feb 2023 19:00:58 -0500 Subject: [PATCH 32/40] try to improve exception handling by avoiding raising ApiError from services --- .../exceptions/api_error.py | 31 +++++++++++++--- .../services/authentication_service.py | 24 ++++++++++--- .../services/authorization_service.py | 36 ++++++++----------- .../src/components/ProcessGroupForm.tsx | 2 +- 4 files changed, 63 insertions(+), 30 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py b/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py index 02a66a20..ab5bf1c3 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py @@ -20,6 +20,11 @@ from SpiffWorkflow.exceptions import WorkflowTaskException from SpiffWorkflow.specs.base import TaskSpec # type: ignore from SpiffWorkflow.task import Task # type: ignore +from spiffworkflow_backend.services.authentication_service import NotAuthorizedError +from spiffworkflow_backend.services.authentication_service import TokenInvalidError +from spiffworkflow_backend.services.authentication_service import TokenNotProvidedError +from spiffworkflow_backend.services.authentication_service import UserNotLoggedInError + api_error_blueprint = Blueprint("api_error_blueprint", __name__) @@ -172,7 +177,12 @@ def handle_exception(exception: Exception) -> flask.wrappers.Response: set_user_sentry_context() sentry_link = None - if not isinstance(exception, ApiError) or exception.error_code != "invalid_token": + # we want to capture_exception to log the exception to sentry, but we don't want to log: + # 1. ApiErrors that are just invalid tokens + # 2. 
NotAuthorizedError + if ( + not isinstance(exception, ApiError) or exception.error_code != "invalid_token" + ) and not isinstance(exception, NotAuthorizedError): id = capture_exception(exception) if isinstance(exception, ApiError): @@ -193,17 +203,30 @@ def handle_exception(exception: Exception) -> flask.wrappers.Response: # an event id or send out tags like username current_app.logger.exception(exception) + error_code = "internal_server_error" + status_code = 500 + if ( + isinstance(exception, NotAuthorizedError) + or isinstance(exception, TokenNotProvidedError) + or isinstance(exception, TokenInvalidError) + ): + error_code = "not_authorized" + status_code = 403 + if isinstance(exception, UserNotLoggedInError): + error_code = "not_authenticated" + status_code = 401 + # set api_exception like this to avoid confusing mypy - # and what type the object is + # about what type the object is api_exception = None if isinstance(exception, ApiError): api_exception = exception else: api_exception = ApiError( - error_code="internal_server_error", + error_code=error_code, message=f"{exception.__class__.__name__}", sentry_link=sentry_link, - status_code=500, + status_code=status_code, ) return make_response(jsonify(api_exception), api_exception.status_code) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py index 1793aab6..5c9c4708 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py @@ -11,7 +11,6 @@ from flask import current_app from flask import redirect from werkzeug.wrappers import Response -from spiffworkflow_backend.exceptions.api_error import ApiError from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.refresh_token import RefreshTokenModel @@ -20,7 +19,21 @@ class MissingAccessTokenError(Exception): """MissingAccessTokenError.""" +class NotAuthorizedError(Exception): + pass + + +class RefreshTokenStorageError(Exception): + pass + + +class UserNotLoggedInError(Exception): + pass + + # These could be either 'id' OR 'access' tokens and we can't always know which + + class TokenExpiredError(Exception): """TokenExpiredError.""" @@ -29,6 +42,10 @@ class TokenInvalidError(Exception): """TokenInvalidError.""" +class TokenNotProvidedError(Exception): + pass + + class AuthenticationProviderTypes(enum.Enum): """AuthenticationServiceProviders.""" @@ -183,9 +200,8 @@ class AuthenticationService: db.session.commit() except Exception as e: db.session.rollback() - raise ApiError( - error_code="store_refresh_token_error", - message=f"We could not store the refresh token. Original error is {e}", + raise RefreshTokenStorageError( + f"We could not store the refresh token. 
Original error is {e}", ) from e @staticmethod diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py index 19f9f418..a72effd4 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py @@ -21,7 +21,6 @@ from SpiffWorkflow.task import Task as SpiffTask # type: ignore from sqlalchemy import or_ from sqlalchemy import text -from spiffworkflow_backend.exceptions.api_error import ApiError from spiffworkflow_backend.helpers.api_version import V1_API_PATH_PREFIX from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.group import GroupModel @@ -34,6 +33,11 @@ from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.models.user import UserNotFoundError from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignmentModel from spiffworkflow_backend.routes.openid_blueprint import openid_blueprint +from spiffworkflow_backend.services.authentication_service import NotAuthorizedError +from spiffworkflow_backend.services.authentication_service import TokenExpiredError +from spiffworkflow_backend.services.authentication_service import TokenInvalidError +from spiffworkflow_backend.services.authentication_service import TokenNotProvidedError +from spiffworkflow_backend.services.authentication_service import UserNotLoggedInError from spiffworkflow_backend.services.group_service import GroupService from spiffworkflow_backend.services.user_service import UserService @@ -98,20 +102,16 @@ class AuthorizationService: def verify_sha256_token(cls, auth_header: Optional[str]) -> None: """Verify_sha256_token.""" if auth_header is None: - raise ApiError( - error_code="unauthorized", - message="", - status_code=403, + raise TokenNotProvidedError( + "unauthorized", ) received_sign = auth_header.split("sha256=")[-1].strip() secret = current_app.config["GITHUB_WEBHOOK_SECRET"].encode() expected_sign = HMAC(key=secret, msg=request.data, digestmod=sha256).hexdigest() if not compare_digest(received_sign, expected_sign): - raise ApiError( - error_code="unauthorized", - message="", - status_code=403, + raise TokenInvalidError( + "unauthorized", ) @classmethod @@ -393,10 +393,8 @@ class AuthorizationService: authorization_exclusion_list = ["permissions_check"] if not hasattr(g, "user"): - raise ApiError( - error_code="user_not_logged_in", - message="User is not logged in. Please log in", - status_code=401, + raise UserNotLoggedInError( + "User is not logged in. 
Please log in", ) api_view_function = current_app.view_functions[request.endpoint] @@ -416,13 +414,11 @@ class AuthorizationService: if has_permission: return None - raise ApiError( - error_code="unauthorized", - message=( + raise NotAuthorizedError( + ( f"User {g.user.username} is not authorized to perform requested action:" f" {permission_string} - {request.path}" ), - status_code=403, ) @staticmethod @@ -440,13 +436,11 @@ class AuthorizationService: payload = jwt.decode(auth_token, options={"verify_signature": False}) return payload except jwt.ExpiredSignatureError as exception: - raise ApiError( - "token_expired", + raise TokenExpiredError( "The Authentication token you provided expired and must be renewed.", ) from exception except jwt.InvalidTokenError as exception: - raise ApiError( - "token_invalid", + raise TokenInvalidError( ( "The Authentication token you provided is invalid. You need a new" " token. " diff --git a/spiffworkflow-frontend/src/components/ProcessGroupForm.tsx b/spiffworkflow-frontend/src/components/ProcessGroupForm.tsx index 79ab8253..c82157d7 100644 --- a/spiffworkflow-frontend/src/components/ProcessGroupForm.tsx +++ b/spiffworkflow-frontend/src/components/ProcessGroupForm.tsx @@ -35,7 +35,7 @@ export default function ProcessGroupForm({ }; const hasValidIdentifier = (identifierToCheck: string) => { - return identifierToCheck.match(/^[a-z0-9][0-9a-z-]+[a-z0-9]$/); + return identifierToCheck.match(/^[a-z0-9][0-9a-z-]*[a-z0-9]$/); }; const handleFormSubmission = (event: any) => { From c237e218b24f70ed7fbb3d40b7df1ed788ca7a12 Mon Sep 17 00:00:00 2001 From: burnettk Date: Thu, 2 Feb 2023 20:59:28 -0500 Subject: [PATCH 33/40] Squashed 'SpiffWorkflow/' changes from 98c6294f..0e61be85 0e61be85 Merge pull request #289 from sartography/improvement/execution-and-serialization-cleanup 527684da fix some typos in the class & method docs 0dff44a4 Merge branch 'main' into improvement/execution-and-serialization-cleanup 64737498 Allow for other PythonScriptEngine environments besides task data (#288) dd63e916 remove some unused tests & diagrams 24aae519 clean up various small stuff 3b2dc35d use context when opening files for parsing 69eec3eb update class/method docs 24528dfb move all spec conversion classes to top level 5af33b11 remove some unused methods related to old serializer 931b90fb reorganize serializer 4e81ed29 consolidate pointless serializer classes d62acf02 change task_spec._update_hook to return a boolean indicating whether the task is ready git-subtree-dir: SpiffWorkflow git-subtree-split: 0e61be85c47474a33037e6f398e64c96e02f13ad --- tests/SpiffWorkflow/bpmn/BoxDeepCopyTest.py | 2 +- .../SpiffWorkflow/bpmn/BpmnLoaderForTests.py | 7 +- .../bpmn/BpmnWorkflowTestCase.py | 7 +- tests/SpiffWorkflow/bpmn/CustomScriptTest.py | 5 +- .../bpmn/FeelExpressionEngineTest.py | 5 +- .../SpiffWorkflow/bpmn/NavListMulipleEnds.py | 47 - .../bpmn/PythonScriptEngineEnvironmentTest.py | 80 ++ tests/SpiffWorkflow/bpmn/TooManyLoopsTest.py | 7 +- .../bpmn/data/ComplexNavigation.bpmn | 746 ---------- ...ExclusiveGatewayMultipleEndNavigation.bpmn | 143 -- .../bpmn/data/NavLeapFrogLong.bpmn | 1209 ----------------- .../bpmn/data/invalid_process_sub.bpmn | 39 - tests/SpiffWorkflow/bpmn/data/rrt.bpmn | 336 ----- .../bpmn/data/serialization/v1.0.json | 6 +- .../bpmn/data/task_data_size.bpmn | 81 ++ .../data/timer_event_changes_last_task.bpmn | 77 -- .../bpmn/events/EventBasedGatewayTest.py | 5 +- .../bpmn/events/TimerCycleStartTest.py | 7 +- .../bpmn/events/TimerCycleTest.py | 7 +- 
.../bpmn/events/TimerDateTest.py | 5 +- .../events/TimerDurationBoundaryOnTaskTest.py | 3 +- .../bpmn/events/TimerDurationTest.py | 3 +- .../bpmn/serializer/BaseTestCase.py | 3 +- .../serializer/BpmnWorkflowSerializerTest.py | 3 +- .../bpmn/serializer/VersionMigrationTest.py | 6 +- tests/SpiffWorkflow/camunda/BaseTestCase.py | 12 +- .../camunda/CallActivityMessageTest.py | 2 +- .../camunda/DMNCustomScriptTest.py | 5 +- .../camunda/MessageBoundaryEventTest.py | 3 +- .../camunda/MultiInstanceDMNTest.py | 10 +- .../camunda/data/exclusive_gateway_pmi.bpmn | 94 -- .../camunda/data/random_fact.svg | 4 - .../camunda/data/top_workflow.bpmn | 64 - .../camunda/specs/UserTaskSpecTest.py | 5 +- tests/SpiffWorkflow/dmn/DecisionRunner.py | 2 +- tests/SpiffWorkflow/dmn/HitPolicyTest.py | 13 +- .../dmn/feel_engine/FeelDictDecisionTest.py | 4 +- .../FeelDictDotNotationDecisionTest.py | 2 +- .../DictDotNotationDecisionTest.py | 2 +- ...tDotNotationDecisionWeirdCharactersTest.py | 2 +- .../dmn/python_engine/PythonDecisionRunner.py | 5 +- tests/SpiffWorkflow/spiff/BaseTestCase.py | 22 +- 42 files changed, 248 insertions(+), 2842 deletions(-) delete mode 100644 tests/SpiffWorkflow/bpmn/NavListMulipleEnds.py create mode 100644 tests/SpiffWorkflow/bpmn/PythonScriptEngineEnvironmentTest.py delete mode 100644 tests/SpiffWorkflow/bpmn/data/ComplexNavigation.bpmn delete mode 100644 tests/SpiffWorkflow/bpmn/data/ExclusiveGatewayMultipleEndNavigation.bpmn delete mode 100644 tests/SpiffWorkflow/bpmn/data/NavLeapFrogLong.bpmn delete mode 100644 tests/SpiffWorkflow/bpmn/data/invalid_process_sub.bpmn delete mode 100644 tests/SpiffWorkflow/bpmn/data/rrt.bpmn create mode 100644 tests/SpiffWorkflow/bpmn/data/task_data_size.bpmn delete mode 100644 tests/SpiffWorkflow/bpmn/data/timer_event_changes_last_task.bpmn delete mode 100644 tests/SpiffWorkflow/camunda/data/exclusive_gateway_pmi.bpmn delete mode 100644 tests/SpiffWorkflow/camunda/data/random_fact.svg delete mode 100644 tests/SpiffWorkflow/camunda/data/top_workflow.bpmn diff --git a/tests/SpiffWorkflow/bpmn/BoxDeepCopyTest.py b/tests/SpiffWorkflow/bpmn/BoxDeepCopyTest.py index 9ebade90..c0b3cc79 100644 --- a/tests/SpiffWorkflow/bpmn/BoxDeepCopyTest.py +++ b/tests/SpiffWorkflow/bpmn/BoxDeepCopyTest.py @@ -1,6 +1,6 @@ import unittest -from SpiffWorkflow.bpmn.PythonScriptEngine import Box +from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import Box class BoxDeepCopyTest(unittest.TestCase): diff --git a/tests/SpiffWorkflow/bpmn/BpmnLoaderForTests.py b/tests/SpiffWorkflow/bpmn/BpmnLoaderForTests.py index 9f90268d..2623e688 100644 --- a/tests/SpiffWorkflow/bpmn/BpmnLoaderForTests.py +++ b/tests/SpiffWorkflow/bpmn/BpmnLoaderForTests.py @@ -7,7 +7,7 @@ from SpiffWorkflow.bpmn.parser.TaskParser import TaskParser from SpiffWorkflow.bpmn.parser.task_parsers import ConditionalGatewayParser from SpiffWorkflow.bpmn.parser.util import full_tag -from SpiffWorkflow.bpmn.serializer.bpmn_converters import BpmnTaskSpecConverter +from SpiffWorkflow.bpmn.serializer.helpers.spec import TaskSpecConverter # Many of our tests relied on the Packager to set the calledElement attribute on # Call Activities. I've moved that code to a customized parser. 
@@ -35,9 +35,6 @@ class TestUserTask(UserTask): task.set_data(choice=choice) task.complete() - @classmethod - def deserialize(self, serializer, wf_spec, s_state): - return serializer.deserialize_generic(wf_spec, s_state, TestUserTask) class TestExclusiveGatewayParser(ConditionalGatewayParser): @@ -47,7 +44,7 @@ class TestExclusiveGatewayParser(ConditionalGatewayParser): return cond return "choice == '%s'" % sequence_flow_node.get('name', None) -class TestUserTaskConverter(BpmnTaskSpecConverter): +class TestUserTaskConverter(TaskSpecConverter): def __init__(self, data_converter=None): super().__init__(TestUserTask, data_converter) diff --git a/tests/SpiffWorkflow/bpmn/BpmnWorkflowTestCase.py b/tests/SpiffWorkflow/bpmn/BpmnWorkflowTestCase.py index 8f2f0af5..ba564abc 100644 --- a/tests/SpiffWorkflow/bpmn/BpmnWorkflowTestCase.py +++ b/tests/SpiffWorkflow/bpmn/BpmnWorkflowTestCase.py @@ -7,13 +7,16 @@ from SpiffWorkflow.bpmn.parser.BpmnParser import BpmnValidator from SpiffWorkflow.task import TaskState -from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer +from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer, DEFAULT_SPEC_CONFIG +from SpiffWorkflow.bpmn.serializer.task_spec import UserTaskConverter from .BpmnLoaderForTests import TestUserTaskConverter, TestBpmnParser __author__ = 'matth' +DEFAULT_SPEC_CONFIG['task_specs'].append(TestUserTaskConverter) -wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter([TestUserTaskConverter]) + +wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter(spec_config=DEFAULT_SPEC_CONFIG) class BpmnWorkflowTestCase(unittest.TestCase): diff --git a/tests/SpiffWorkflow/bpmn/CustomScriptTest.py b/tests/SpiffWorkflow/bpmn/CustomScriptTest.py index 8cbca47f..d2b21886 100644 --- a/tests/SpiffWorkflow/bpmn/CustomScriptTest.py +++ b/tests/SpiffWorkflow/bpmn/CustomScriptTest.py @@ -4,6 +4,7 @@ import unittest from SpiffWorkflow.exceptions import WorkflowTaskException from SpiffWorkflow.task import TaskState from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine +from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment from SpiffWorkflow.bpmn.workflow import BpmnWorkflow from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase @@ -17,8 +18,8 @@ class CustomBpmnScriptEngine(PythonScriptEngine): It will execute python code read in from the bpmn. It will also make any scripts in the scripts directory available for execution. 
""" def __init__(self): - augment_methods = {'custom_function': my_custom_function} - super().__init__(scripting_additions=augment_methods) + environment = TaskDataEnvironment({'custom_function': my_custom_function}) + super().__init__(environment=environment) class CustomInlineScriptTest(BpmnWorkflowTestCase): diff --git a/tests/SpiffWorkflow/bpmn/FeelExpressionEngineTest.py b/tests/SpiffWorkflow/bpmn/FeelExpressionEngineTest.py index 6fe07dec..474e988d 100644 --- a/tests/SpiffWorkflow/bpmn/FeelExpressionEngineTest.py +++ b/tests/SpiffWorkflow/bpmn/FeelExpressionEngineTest.py @@ -3,6 +3,7 @@ import unittest from SpiffWorkflow.bpmn.FeelLikeScriptEngine import FeelLikeScriptEngine, FeelInterval +from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import BoxedTaskDataEnvironment from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase import datetime @@ -12,7 +13,7 @@ __author__ = 'matth' class FeelExpressionTest(BpmnWorkflowTestCase): def setUp(self): - self.expressionEngine = FeelLikeScriptEngine() + self.expressionEngine = FeelLikeScriptEngine(environment=BoxedTaskDataEnvironment()) def testRunThroughExpressions(self): tests = [("string length('abcd')", 4, {}), @@ -62,7 +63,7 @@ class FeelExpressionTest(BpmnWorkflowTestCase): ] } x = self.expressionEngine._evaluate( - """sum([1 for x in exclusive if x.get('ExclusiveSpaceAMComputingID',None)==None])""", + """sum([1 for x in exclusive if x.get('ExclusiveSpaceAMComputingID',None)==None])""", data ) self.assertEqual(x, 1) diff --git a/tests/SpiffWorkflow/bpmn/NavListMulipleEnds.py b/tests/SpiffWorkflow/bpmn/NavListMulipleEnds.py deleted file mode 100644 index 4d410025..00000000 --- a/tests/SpiffWorkflow/bpmn/NavListMulipleEnds.py +++ /dev/null @@ -1,47 +0,0 @@ -# -*- coding: utf-8 -*- - - - -import unittest -from SpiffWorkflow.bpmn.workflow import BpmnWorkflow -from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase - -__author__ = 'kellym' - - -class NavListExclusiveGatewayTest(BpmnWorkflowTestCase): - """The example bpmn diagram looks roughly like this, a gateway - that leads to two different end points - - [Step 1] -> - -> 'False' -> [Alternate End] -> END A - -> 'True' -> [Step 2] -> END B - """ - - def setUp(self): - self.spec = self.load_workflow1_spec() - - def load_workflow1_spec(self): - return self.load_workflow_spec('ExclusiveGatewayMultipleEndNavigation.bpmn','ExclusiveGatewayMultipleEndNavigation') - - def testRunThroughHappy(self): - - self.workflow = BpmnWorkflow(self.spec) - self.workflow.do_engine_steps() - nav_list = self.workflow.get_nav_list() - self.assertEqual(6, len(nav_list)) - - self.assertEqual("Step 1", nav_list[0]["description"]) - self.assertEqual("GatewayToEnd", nav_list[1]["description"]) - self.assertEqual("False", nav_list[2]["description"]) - self.assertEqual("Step End", nav_list[3]["description"]) - self.assertEqual("True", nav_list[4]["description"]) - self.assertEqual("Step 2", nav_list[5]["description"]) - - self.assertEqual(0, nav_list[0]["indent"]) - - -def suite(): - return unittest.TestLoader().loadTestsFromTestCase(NavListExclusiveGatewayTest) -if __name__ == '__main__': - unittest.TextTestRunner(verbosity=2).run(suite()) diff --git a/tests/SpiffWorkflow/bpmn/PythonScriptEngineEnvironmentTest.py b/tests/SpiffWorkflow/bpmn/PythonScriptEngineEnvironmentTest.py new file mode 100644 index 00000000..cb9c40c0 --- /dev/null +++ b/tests/SpiffWorkflow/bpmn/PythonScriptEngineEnvironmentTest.py @@ -0,0 +1,80 @@ +import json + +from 
diff --git a/tests/SpiffWorkflow/bpmn/PythonScriptEngineEnvironmentTest.py b/tests/SpiffWorkflow/bpmn/PythonScriptEngineEnvironmentTest.py
new file mode 100644
index 00000000..cb9c40c0
--- /dev/null
+++ b/tests/SpiffWorkflow/bpmn/PythonScriptEngineEnvironmentTest.py
@@ -0,0 +1,80 @@
+import json
+
+from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
+from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
+from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import BasePythonScriptEngineEnvironment
+from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
+from SpiffWorkflow.task import TaskState
+
+def example_global():
+    pass
+
+class NonTaskDataExampleEnvironment(BasePythonScriptEngineEnvironment):
+    def __init__(self, environment_globals, environment):
+        self.environment = environment
+        self.environment.update(environment_globals)
+        super().__init__(environment_globals)
+
+    def evaluate(self, expression, context, external_methods=None):
+        pass
+
+    def execute(self, script, context, external_methods=None):
+        self.environment.update(context)
+        self.environment.update(external_methods or {})
+        exec(script, self.environment)
+        self.environment = {k: v for k, v in self.environment.items() if k not in external_methods}
+
+    def user_defined_values(self):
+        return {k: v for k, v in self.environment.items() if k not in self.globals}
+
+class PythonScriptEngineEnvironmentTest(BpmnWorkflowTestCase):
+
+    def setUp(self):
+        spec, subprocesses = self.load_workflow_spec('task_data_size.bpmn', 'Process_ccz6oq2')
+        self.workflow = BpmnWorkflow(spec, subprocesses)
+
+    def testTaskDataSizeWithDefaultPythonScriptEngine(self):
+        self.workflow.do_engine_steps()
+
+        self.assertIn("a", self.workflow.data)
+        self.assertIn("b", self.workflow.data)
+        self.assertIn("c", self.workflow.data)
+        self.assertIn("d", self.workflow.data)
+
+        task_data_len = self._get_task_data_len()
+        d_uniques = set(self.workflow.data["d"])
+        d_len = len(self.workflow.data["d"])
+
+        self.assertGreater(task_data_len, 15000)
+        self.assertEqual(d_len, 512*3)
+        self.assertEqual(d_uniques, {"a", "b", "c"})
+
+    def testTaskDataSizeWithNonTaskDataEnvironmentBasedPythonScriptEngine(self):
+        script_engine_environment = NonTaskDataExampleEnvironment({"example_global": example_global}, {})
+        script_engine = PythonScriptEngine(environment=script_engine_environment)
+        self.workflow.script_engine = script_engine
+
+        self.workflow.do_engine_steps()
+        self.workflow.data.update(script_engine.environment.user_defined_values())
+
+        self.assertIn("a", self.workflow.data)
+        self.assertIn("b", self.workflow.data)
+        self.assertIn("c", self.workflow.data)
+        self.assertIn("d", self.workflow.data)
+        self.assertNotIn("example_global", self.workflow.data)
+
+        task_data_len = self._get_task_data_len()
+        d_uniques = set(self.workflow.data["d"])
+        d_len = len(self.workflow.data["d"])
+
+        self.assertEqual(task_data_len, 2)
+        self.assertEqual(d_len, 512*3)
+        self.assertEqual(d_uniques, {"a", "b", "c"})
+
+    def _get_task_data_len(self):
+        tasks_to_check = self.workflow.get_tasks(TaskState.FINISHED_MASK)
+        task_data = [task.data for task in tasks_to_check]
+        task_data_to_check = list(filter(len, task_data))
+        task_data_len = len(json.dumps(task_data_to_check))
+        return task_data_len
+
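The new test file above exercises the other half of the environment API: a fully custom environment can keep script results out of task data and hand them back explicitly. A sketch of driving a workflow that way, using the NonTaskDataExampleEnvironment class defined in the test:

    engine = PythonScriptEngine(
        environment=NonTaskDataExampleEnvironment({"example_global": example_global}, {}))
    workflow.script_engine = engine   # workflow: a BpmnWorkflow, assumed in scope
    workflow.do_engine_steps()
    # Script output lives in the environment rather than in task.data, so the
    # user-defined names must be merged back into the workflow explicitly.
    workflow.data.update(engine.environment.user_defined_values())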
diff --git a/tests/SpiffWorkflow/bpmn/TooManyLoopsTest.py b/tests/SpiffWorkflow/bpmn/TooManyLoopsTest.py
index 072d9375..0b3d5603 100644
--- a/tests/SpiffWorkflow/bpmn/TooManyLoopsTest.py
+++ b/tests/SpiffWorkflow/bpmn/TooManyLoopsTest.py
@@ -4,6 +4,7 @@ import datetime
 import unittest
 
 from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
+from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment
 from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
 from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
@@ -14,10 +15,10 @@ class CustomScriptEngine(PythonScriptEngine):
     It will execute python code read in from the bpmn.  It will also make any scripts in the
     scripts directory available for execution. """
     def __init__(self):
-        augment_methods = {
+        environment = TaskDataEnvironment({
             'timedelta': datetime.timedelta,
-        }
-        super().__init__(scripting_additions=augment_methods)
+        })
+        super().__init__(environment=environment)
 
 class TooManyLoopsTest(BpmnWorkflowTestCase):
diff --git a/tests/SpiffWorkflow/bpmn/data/ComplexNavigation.bpmn b/tests/SpiffWorkflow/bpmn/data/ComplexNavigation.bpmn
deleted file mode 100644
index 87b743e0..00000000
--- a/tests/SpiffWorkflow/bpmn/data/ComplexNavigation.bpmn
+++ /dev/null
@@ -1,746 +0,0 @@
-[746 lines of BPMN XML deleted; the markup was not preserved in this copy of the patch. The process determined a study's Responsible Organization and Department Chair from the PI's primary appointment.]
diff --git a/tests/SpiffWorkflow/bpmn/data/ExclusiveGatewayMultipleEndNavigation.bpmn b/tests/SpiffWorkflow/bpmn/data/ExclusiveGatewayMultipleEndNavigation.bpmn
deleted file mode 100644
index 28c4a553..00000000
--- a/tests/SpiffWorkflow/bpmn/data/ExclusiveGatewayMultipleEndNavigation.bpmn
+++ /dev/null
@@ -1,143 +0,0 @@
-[143 lines of BPMN XML deleted; markup not preserved in this copy of the patch. An exclusive gateway led to two separate end points, confirming the Primary Investigator or ending with "No PI entered in PB".]
diff --git a/tests/SpiffWorkflow/bpmn/data/NavLeapFrogLong.bpmn b/tests/SpiffWorkflow/bpmn/data/NavLeapFrogLong.bpmn
deleted file mode 100644
index 6a92338e..00000000
--- a/tests/SpiffWorkflow/bpmn/data/NavLeapFrogLong.bpmn
+++ /dev/null
@@ -1,1209 +0,0 @@
-[1209 lines of BPMN XML deleted; markup not preserved in this copy of the patch. The process scripted Protocol Builder investigator handling (PI, department chair, coordinators, sub-investigators, additional personnel) via ldap() and study_info().]
diff --git a/tests/SpiffWorkflow/bpmn/data/invalid_process_sub.bpmn b/tests/SpiffWorkflow/bpmn/data/invalid_process_sub.bpmn
deleted file mode 100644
index 2a8793e9..00000000
--- a/tests/SpiffWorkflow/bpmn/data/invalid_process_sub.bpmn
+++ /dev/null
@@ -1,39 +0,0 @@
-[39 lines of BPMN XML deleted; markup not preserved in this copy of the patch. A script task ran print('complicated common task').]
diff --git a/tests/SpiffWorkflow/bpmn/data/rrt.bpmn b/tests/SpiffWorkflow/bpmn/data/rrt.bpmn
deleted file mode 100644
index e6d1afb2..00000000
--- a/tests/SpiffWorkflow/bpmn/data/rrt.bpmn
+++ /dev/null
@@ -1,336 +0,0 @@
-[336 lines of BPMN XML deleted; markup not preserved in this copy of the patch. The process collected a UVA Research Ramp-up Plan (lab details, animal use, grant support) and generated ResearchRecoveryPlan.docx via CompleteTemplate.]
diff --git a/tests/SpiffWorkflow/bpmn/data/serialization/v1.0.json b/tests/SpiffWorkflow/bpmn/data/serialization/v1.0.json
index f580929a..39307bd3 100644
--- a/tests/SpiffWorkflow/bpmn/data/serialization/v1.0.json
+++ b/tests/SpiffWorkflow/bpmn/data/serialization/v1.0.json
@@ -142,7 +142,7 @@
         "typename":"SequenceFlow"
       }
     },
-    "typename":"TestUserTask",
+    "typename":"UserTask",
     "extensions":{}
   },
   "sid-C014B4B9-889F-4EE9-9949-C89502C35CF0":{
@@ -697,7 +697,7 @@
         "typename":"SequenceFlow"
       }
     },
-    "typename":"TestUserTask",
+    "typename":"UserTask",
     "extensions":{}
   },
   "sid-2EDAD784-7F15-486C-B805-D26EE25F8087":{
@@ -906,7 +906,7 @@
         "typename":"SequenceFlow"
       }
     },
-    "typename":"TestUserTask",
+    "typename":"UserTask",
     "extensions":{}
   },
   "sid-BC014079-199F-4720-95CD-244B0ACB6DE1":{
diff --git a/tests/SpiffWorkflow/bpmn/data/task_data_size.bpmn b/tests/SpiffWorkflow/bpmn/data/task_data_size.bpmn
new file mode 100644
index 00000000..22a39425
--- /dev/null
+++ b/tests/SpiffWorkflow/bpmn/data/task_data_size.bpmn
@@ -0,0 +1,81 @@
+[81 lines of BPMN XML added; markup not preserved in this copy of the patch. Process Process_ccz6oq2 chains four script tasks: a="a"*512, b="b"*512, c="c"*512, then d=a+b+c.]
diff --git a/tests/SpiffWorkflow/bpmn/data/timer_event_changes_last_task.bpmn b/tests/SpiffWorkflow/bpmn/data/timer_event_changes_last_task.bpmn
deleted file mode 100644
index 8b6acb97..00000000
--- a/tests/SpiffWorkflow/bpmn/data/timer_event_changes_last_task.bpmn
+++ /dev/null
@@ -1,77 +0,0 @@
-[77 lines of BPMN XML deleted; markup not preserved in this copy of the patch. A timedelta(milliseconds=2) timer event set timer_called = True; the start script set timer_called = False.]
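The v1.0.json fixture edit above is the data-side counterpart of the converter rework: test user tasks now serialize under the stock UserTask typename, and test-only converters are registered by extending the serializer's spec config instead of passing an ad-hoc converter list. A sketch of the registration pattern, mirroring BpmnWorkflowTestCase.py earlier in this patch:

    from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer, DEFAULT_SPEC_CONFIG
    from tests.SpiffWorkflow.bpmn.BpmnLoaderForTests import TestUserTaskConverter

    # Append project-specific task spec converters to the default config,
    # then build the spec converter from the whole config.
    DEFAULT_SPEC_CONFIG['task_specs'].append(TestUserTaskConverter)
    wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter(spec_config=DEFAULT_SPEC_CONFIG)
    serializer = BpmnWorkflowSerializer(wf_spec_converter)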
diff --git a/tests/SpiffWorkflow/bpmn/events/EventBasedGatewayTest.py b/tests/SpiffWorkflow/bpmn/events/EventBasedGatewayTest.py
index 6e549784..29febd24 100644
--- a/tests/SpiffWorkflow/bpmn/events/EventBasedGatewayTest.py
+++ b/tests/SpiffWorkflow/bpmn/events/EventBasedGatewayTest.py
@@ -2,6 +2,7 @@ from datetime import timedelta
 
 from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
 from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
+from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment
 from SpiffWorkflow.bpmn.specs.events.event_definitions import MessageEventDefinition
 from SpiffWorkflow.task import TaskState
 
@@ -11,7 +12,7 @@ class EventBsedGatewayTest(BpmnWorkflowTestCase):
 
     def setUp(self):
         self.spec, self.subprocesses = self.load_workflow_spec('event-gateway.bpmn', 'Process_0pvx19v')
-        self.script_engine = PythonScriptEngine(default_globals={"timedelta": timedelta})
+        self.script_engine = PythonScriptEngine(environment=TaskDataEnvironment({"timedelta": timedelta}))
         self.workflow = BpmnWorkflow(self.spec, script_engine=self.script_engine)
 
     def testEventBasedGateway(self):
@@ -29,8 +30,8 @@ class EventBsedGatewayTest(BpmnWorkflowTestCase):
         self.workflow.script_engine = self.script_engine
         self.assertEqual(len(waiting_tasks), 1)
         self.workflow.catch(MessageEventDefinition('message_1'))
-        self.workflow.refresh_waiting_tasks()
         self.workflow.do_engine_steps()
+        self.workflow.refresh_waiting_tasks()
         self.assertEqual(self.workflow.is_completed(), True)
         self.assertEqual(self.workflow.get_tasks_from_spec_name('message_1_event')[0].state, TaskState.COMPLETED)
         self.assertEqual(self.workflow.get_tasks_from_spec_name('message_2_event')[0].state, TaskState.CANCELLED)
diff --git a/tests/SpiffWorkflow/bpmn/events/TimerCycleStartTest.py b/tests/SpiffWorkflow/bpmn/events/TimerCycleStartTest.py
index bf89912c..58e100ea 100644
--- a/tests/SpiffWorkflow/bpmn/events/TimerCycleStartTest.py
+++ b/tests/SpiffWorkflow/bpmn/events/TimerCycleStartTest.py
@@ -5,6 +5,7 @@ import unittest
 import time
 
 from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
+from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment
 from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
 from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
 
@@ -24,11 +25,11 @@ class CustomScriptEngine(PythonScriptEngine):
     It will execute python code read in from the bpmn.  It will also make any scripts in the
     scripts directory available for execution. """
     def __init__(self):
-        augment_methods = {
+        environment = TaskDataEnvironment({
             'custom_function': my_custom_function,
             'timedelta': datetime.timedelta,
-        }
-        super().__init__(scripting_additions=augment_methods)
+        })
+        super().__init__(environment=environment)
 
 class TimerCycleStartTest(BpmnWorkflowTestCase):
""" def __init__(self): - augment_methods = { + environment = TaskDataEnvironment({ 'custom_function': my_custom_function, 'timedelta': datetime.timedelta, - } - super().__init__(scripting_additions=augment_methods) + }) + super().__init__(environment=environment) diff --git a/tests/SpiffWorkflow/bpmn/events/TimerDateTest.py b/tests/SpiffWorkflow/bpmn/events/TimerDateTest.py index deebd775..e56cc393 100644 --- a/tests/SpiffWorkflow/bpmn/events/TimerDateTest.py +++ b/tests/SpiffWorkflow/bpmn/events/TimerDateTest.py @@ -6,6 +6,7 @@ import time from SpiffWorkflow.bpmn.workflow import BpmnWorkflow from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine +from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase __author__ = 'kellym' @@ -14,10 +15,10 @@ __author__ = 'kellym' class TimerDateTest(BpmnWorkflowTestCase): def setUp(self): - self.script_engine = PythonScriptEngine(default_globals={ + self.script_engine = PythonScriptEngine(environment=TaskDataEnvironment({ "datetime": datetime.datetime, "timedelta": datetime.timedelta, - }) + })) self.spec, self.subprocesses = self.load_workflow_spec('timer-date-start.bpmn', 'date_timer') self.workflow = BpmnWorkflow(self.spec, self.subprocesses, script_engine=self.script_engine) diff --git a/tests/SpiffWorkflow/bpmn/events/TimerDurationBoundaryOnTaskTest.py b/tests/SpiffWorkflow/bpmn/events/TimerDurationBoundaryOnTaskTest.py index aff5d429..1cd2c17b 100644 --- a/tests/SpiffWorkflow/bpmn/events/TimerDurationBoundaryOnTaskTest.py +++ b/tests/SpiffWorkflow/bpmn/events/TimerDurationBoundaryOnTaskTest.py @@ -6,6 +6,7 @@ from datetime import timedelta from SpiffWorkflow.bpmn.workflow import BpmnWorkflow from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine +from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase __author__ = 'kellym' @@ -13,7 +14,7 @@ __author__ = 'kellym' class TimerDurationTest(BpmnWorkflowTestCase): def setUp(self): - self.script_engine = PythonScriptEngine(default_globals={"timedelta": timedelta}) + self.script_engine = PythonScriptEngine(environment=TaskDataEnvironment({"timedelta": timedelta})) self.spec, self.subprocesses = self.load_workflow_spec('boundary_timer_on_task.bpmn', 'test_timer') self.workflow = BpmnWorkflow(self.spec, self.subprocesses, script_engine=self.script_engine) diff --git a/tests/SpiffWorkflow/bpmn/events/TimerDurationTest.py b/tests/SpiffWorkflow/bpmn/events/TimerDurationTest.py index c8e72fcd..18cbd12d 100644 --- a/tests/SpiffWorkflow/bpmn/events/TimerDurationTest.py +++ b/tests/SpiffWorkflow/bpmn/events/TimerDurationTest.py @@ -5,6 +5,7 @@ import time from datetime import datetime, timedelta from SpiffWorkflow.bpmn.workflow import BpmnWorkflow from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine +from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase __author__ = 'kellym' @@ -13,7 +14,7 @@ __author__ = 'kellym' class TimerDurationTest(BpmnWorkflowTestCase): def setUp(self): - self.script_engine = PythonScriptEngine(default_globals={"timedelta": timedelta}) + self.script_engine = PythonScriptEngine(environment=TaskDataEnvironment({"timedelta": timedelta})) self.spec, self.subprocesses = self.load_workflow_spec('timer.bpmn', 'timer') self.workflow = 
diff --git a/tests/SpiffWorkflow/bpmn/serializer/BaseTestCase.py b/tests/SpiffWorkflow/bpmn/serializer/BaseTestCase.py
index e392e5db..5fa99e92 100644
--- a/tests/SpiffWorkflow/bpmn/serializer/BaseTestCase.py
+++ b/tests/SpiffWorkflow/bpmn/serializer/BaseTestCase.py
@@ -4,7 +4,6 @@ import os
 from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
 from SpiffWorkflow.bpmn.parser.BpmnParser import BpmnParser
 from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer
-from tests.SpiffWorkflow.bpmn.BpmnLoaderForTests import TestUserTaskConverter
 
 class BaseTestCase(unittest.TestCase):
 
@@ -21,7 +20,7 @@ class BaseTestCase(unittest.TestCase):
 
     def setUp(self):
         super(BaseTestCase, self).setUp()
-        wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter([TestUserTaskConverter])
+        wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter()
         self.serializer = BpmnWorkflowSerializer(wf_spec_converter, version=self.SERIALIZER_VERSION)
         spec, subprocesses = self.load_workflow_spec('random_fact.bpmn', 'random_fact')
         self.workflow = BpmnWorkflow(spec, subprocesses)
diff --git a/tests/SpiffWorkflow/bpmn/serializer/BpmnWorkflowSerializerTest.py b/tests/SpiffWorkflow/bpmn/serializer/BpmnWorkflowSerializerTest.py
index ed547952..88612867 100644
--- a/tests/SpiffWorkflow/bpmn/serializer/BpmnWorkflowSerializerTest.py
+++ b/tests/SpiffWorkflow/bpmn/serializer/BpmnWorkflowSerializerTest.py
@@ -5,7 +5,6 @@ import json
 from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
 from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer
 from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
-from tests.SpiffWorkflow.bpmn.BpmnLoaderForTests import TestUserTaskConverter
 
 from .BaseTestCase import BaseTestCase
 
@@ -71,7 +70,7 @@ class BpmnWorkflowSerializerTest(BaseTestCase):
 
         try:
             self.assertRaises(TypeError, self.serializer.serialize_json, self.workflow)
-            wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter([TestUserTaskConverter])
+            wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter()
             custom_serializer = BpmnWorkflowSerializer(wf_spec_converter, version=self.SERIALIZER_VERSION,json_encoder_cls=MyJsonEncoder, json_decoder_cls=MyJsonDecoder)
             serialized_workflow = custom_serializer.serialize_json(self.workflow)
         finally:
diff --git a/tests/SpiffWorkflow/bpmn/serializer/VersionMigrationTest.py b/tests/SpiffWorkflow/bpmn/serializer/VersionMigrationTest.py
index cd38b5f8..cae051ba 100644
--- a/tests/SpiffWorkflow/bpmn/serializer/VersionMigrationTest.py
+++ b/tests/SpiffWorkflow/bpmn/serializer/VersionMigrationTest.py
@@ -3,9 +3,11 @@ import time
 
 from SpiffWorkflow.task import TaskState
 from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
+from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment
 
 from .BaseTestCase import BaseTestCase
 
+
 class VersionMigrationTest(BaseTestCase):
 
     SERIALIZER_VERSION = "1.2"
@@ -24,7 +26,7 @@ class VersionMigrationTest(BaseTestCase):
     def test_convert_1_1_to_1_2(self):
         fn = os.path.join(self.DATA_DIR, 'serialization', 'v1-1.json')
         wf = self.serializer.deserialize_json(open(fn).read())
-        wf.script_engine = PythonScriptEngine(default_globals={"time": time})
+        wf.script_engine = PythonScriptEngine(environment=TaskDataEnvironment({"time": time}))
         wf.refresh_waiting_tasks()
         wf.do_engine_steps()
-        self.assertTrue(wf.is_completed())
\ No newline at end of file
+        self.assertTrue(wf.is_completed())
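VersionMigrationTest above also highlights a property worth remembering: the script engine is not part of the serialized state, so a freshly deserialized workflow must have an engine assigned before it is stepped. A sketch, assuming a configured serializer and a saved JSON string:

    import time
    from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
    from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment

    wf = serializer.deserialize_json(json_str)   # serializer, json_str assumed in scope
    # Re-attach a script engine; it is not round-tripped by the serializer.
    wf.script_engine = PythonScriptEngine(environment=TaskDataEnvironment({'time': time}))
    wf.refresh_waiting_tasks()
    wf.do_engine_steps()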
diff --git a/tests/SpiffWorkflow/camunda/BaseTestCase.py b/tests/SpiffWorkflow/camunda/BaseTestCase.py
index 67d9c590..8cdde562 100644
--- a/tests/SpiffWorkflow/camunda/BaseTestCase.py
+++ b/tests/SpiffWorkflow/camunda/BaseTestCase.py
@@ -1,22 +1,20 @@
 # -*- coding: utf-8 -*-
 import os
+from copy import deepcopy
 
 from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer
 from SpiffWorkflow.camunda.parser.CamundaParser import CamundaParser
-from SpiffWorkflow.camunda.serializer.task_spec_converters import UserTaskConverter, StartEventConverter, EndEventConverter, \
-    IntermediateCatchEventConverter, IntermediateThrowEventConverter, BoundaryEventConverter
+from SpiffWorkflow.camunda.serializer.config import CAMUNDA_SPEC_CONFIG
 
-from SpiffWorkflow.dmn.serializer.task_spec_converters import BusinessRuleTaskConverter
+from SpiffWorkflow.dmn.serializer.task_spec import BusinessRuleTaskConverter
 
 from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
 
+CAMUNDA_SPEC_CONFIG['task_specs'].append(BusinessRuleTaskConverter)
 
 __author__ = 'danfunk'
 
-wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter([
-    UserTaskConverter, BusinessRuleTaskConverter, StartEventConverter,
-    EndEventConverter, BoundaryEventConverter, IntermediateCatchEventConverter,
-    IntermediateThrowEventConverter])
+wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter(CAMUNDA_SPEC_CONFIG)
 
 class BaseTestCase(BpmnWorkflowTestCase):
     """ Provides some basic tools for loading up and parsing camunda BPMN files """
diff --git a/tests/SpiffWorkflow/camunda/CallActivityMessageTest.py b/tests/SpiffWorkflow/camunda/CallActivityMessageTest.py
index 400f6011..a7d9d6c5 100644
--- a/tests/SpiffWorkflow/camunda/CallActivityMessageTest.py
+++ b/tests/SpiffWorkflow/camunda/CallActivityMessageTest.py
@@ -16,7 +16,7 @@ class CallActivityMessageTest(BaseTestCase):
     def testRunThroughHappy(self):
         self.actual_test(save_restore=False)
 
-    def testThroughSaveRestore(self):
+    def testRunThroughSaveRestore(self):
         self.actual_test(save_restore=True)
 
     def actual_test(self, save_restore=False):
diff --git a/tests/SpiffWorkflow/camunda/DMNCustomScriptTest.py b/tests/SpiffWorkflow/camunda/DMNCustomScriptTest.py
index cde4662c..23d25634 100644
--- a/tests/SpiffWorkflow/camunda/DMNCustomScriptTest.py
+++ b/tests/SpiffWorkflow/camunda/DMNCustomScriptTest.py
@@ -1,5 +1,6 @@
 import unittest
 from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
+from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment
 from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
 
 from .BaseTestCase import BaseTestCase
 
@@ -12,8 +13,8 @@ def my_custom_function(txt):
 
 class CustomScriptEngine(PythonScriptEngine):
     def __init__(self):
-        augment_methods = {'my_custom_function': my_custom_function}
-        super().__init__(scripting_additions=augment_methods)
+        environment = TaskDataEnvironment({'my_custom_function': my_custom_function})
+        super().__init__(environment=environment)
 
 class DMNCustomScriptTest(BaseTestCase):
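Note the new `from copy import deepcopy` import above: CAMUNDA_SPEC_CONFIG is a module-level dictionary, so appending to it mutates shared state for every importer. Where that matters, a safer variation (a sketch, not what this patch does) is to extend a private copy:

    from copy import deepcopy
    from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer
    from SpiffWorkflow.camunda.serializer.config import CAMUNDA_SPEC_CONFIG
    from SpiffWorkflow.dmn.serializer.task_spec import BusinessRuleTaskConverter

    # Copy first so other users of CAMUNDA_SPEC_CONFIG are unaffected.
    my_config = deepcopy(CAMUNDA_SPEC_CONFIG)
    my_config['task_specs'].append(BusinessRuleTaskConverter)
    wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter(my_config)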
diff --git a/tests/SpiffWorkflow/camunda/MessageBoundaryEventTest.py b/tests/SpiffWorkflow/camunda/MessageBoundaryEventTest.py
index ce830b3a..8c0bf3c9 100644
--- a/tests/SpiffWorkflow/camunda/MessageBoundaryEventTest.py
+++ b/tests/SpiffWorkflow/camunda/MessageBoundaryEventTest.py
@@ -7,6 +7,7 @@ from datetime import timedelta
 from SpiffWorkflow.task import TaskState
 from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
 from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
+from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment
 
 from .BaseTestCase import BaseTestCase
 
 __author__ = 'kellym'
@@ -15,7 +16,7 @@ __author__ = 'kellym'
 class MessageBoundaryTest(BaseTestCase):
 
     def setUp(self):
-        self.script_engine = PythonScriptEngine(default_globals={"timedelta": timedelta})
+        self.script_engine = PythonScriptEngine(environment=TaskDataEnvironment({"timedelta": timedelta}))
         self.spec, self.subprocesses = self.load_workflow_spec('MessageBoundary.bpmn', 'Process_1kjyavs')
         self.workflow = BpmnWorkflow(self.spec, self.subprocesses, script_engine=self.script_engine)
diff --git a/tests/SpiffWorkflow/camunda/MultiInstanceDMNTest.py b/tests/SpiffWorkflow/camunda/MultiInstanceDMNTest.py
index c381d5b6..e31aa083 100644
--- a/tests/SpiffWorkflow/camunda/MultiInstanceDMNTest.py
+++ b/tests/SpiffWorkflow/camunda/MultiInstanceDMNTest.py
@@ -1,6 +1,8 @@
 import unittest
 
 from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
+from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
+from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import BoxedTaskDataEnvironment
 
 from .BaseTestCase import BaseTestCase
 
@@ -10,12 +12,13 @@ class MultiInstanceDMNTest(BaseTestCase):
         self.spec, subprocesses = self.load_workflow_spec(
             'DMNMultiInstance.bpmn', 'Process_1', 'test_integer_decision_multi.dmn')
         self.workflow = BpmnWorkflow(self.spec)
+        self.script_engine = PythonScriptEngine(environment=BoxedTaskDataEnvironment())
+        self.workflow.script_engine = self.script_engine
 
     def testConstructor(self):
         pass  # this is accomplished through setup.
 
     def testDmnHappy(self):
-        self.workflow = BpmnWorkflow(self.spec)
         self.workflow.do_engine_steps()
         self.workflow.complete_next()
         self.workflow.do_engine_steps()
@@ -25,16 +28,19 @@ class MultiInstanceDMNTest(BaseTestCase):
 
     def testDmnSaveRestore(self):
-        self.workflow = BpmnWorkflow(self.spec)
         self.save_restore()
+        self.workflow.script_engine = self.script_engine
         self.workflow.do_engine_steps()
         self.workflow.complete_next()
         self.save_restore()
+        self.workflow.script_engine = self.script_engine
         self.workflow.do_engine_steps()
         self.workflow.complete_next()
         self.save_restore()
+        self.workflow.script_engine = self.script_engine
         self.workflow.do_engine_steps()
         self.save_restore()
+        self.workflow.script_engine = self.script_engine
         self.assertEqual(self.workflow.data['stuff']['E']['y'], 'D')
diff --git a/tests/SpiffWorkflow/camunda/data/exclusive_gateway_pmi.bpmn b/tests/SpiffWorkflow/camunda/data/exclusive_gateway_pmi.bpmn
deleted file mode 100644
index 4acb9f8e..00000000
--- a/tests/SpiffWorkflow/camunda/data/exclusive_gateway_pmi.bpmn
+++ /dev/null
@@ -1,94 +0,0 @@
-[94 lines of BPMN XML deleted; markup not preserved in this copy of the patch. An exclusive gateway branched on morestuff == 'Yes' / morestuff == 'No'.]
diff --git a/tests/SpiffWorkflow/camunda/data/random_fact.svg b/tests/SpiffWorkflow/camunda/data/random_fact.svg
deleted file mode 100644
index 3078ea0e..00000000
--- a/tests/SpiffWorkflow/camunda/data/random_fact.svg
+++ /dev/null
@@ -1,4 +0,0 @@
-[4 lines of SVG deleted; diagram labels: "Set Type", "Display Fact", "User sets the Fact.type to cat, norris, or buzzword", "Makes an API call to get a fact of the required type"]
\ No newline at end of file
diff --git a/tests/SpiffWorkflow/camunda/data/top_workflow.bpmn b/tests/SpiffWorkflow/camunda/data/top_workflow.bpmn
deleted file mode 100644
index c3657374..00000000
--- a/tests/SpiffWorkflow/camunda/data/top_workflow.bpmn
+++ /dev/null
@@ -1,64 +0,0 @@
-[64 lines of BPMN XML deleted; markup not preserved in this copy of the patch. Script tasks called my_custom_function('test 1 from top workflow') and my_custom_function('test 2 from top workflow') around a subprocess.]
diff --git a/tests/SpiffWorkflow/camunda/specs/UserTaskSpecTest.py b/tests/SpiffWorkflow/camunda/specs/UserTaskSpecTest.py
index 3de8fa2a..b24f1c37 100644
--- a/tests/SpiffWorkflow/camunda/specs/UserTaskSpecTest.py
+++ b/tests/SpiffWorkflow/camunda/specs/UserTaskSpecTest.py
@@ -1,7 +1,8 @@
 import unittest
 
 from SpiffWorkflow.camunda.specs.UserTask import FormField, UserTask, Form, EnumFormField
-from SpiffWorkflow.camunda.serializer.task_spec_converters import UserTaskConverter
+from SpiffWorkflow.camunda.serializer.task_spec import UserTaskConverter
+from SpiffWorkflow.bpmn.serializer.helpers.dictionary import DictionaryConverter
 from SpiffWorkflow.specs.WorkflowSpec import WorkflowSpec
 
@@ -53,7 +54,7 @@ class UserTaskSpecTest(unittest.TestCase):
         self.form.add_field(field1)
         self.form.add_field(field2)
 
-        converter = UserTaskConverter()
+        converter = UserTaskConverter(DictionaryConverter())
         dct = converter.to_dict(self.user_spec)
         self.assertEqual(dct['name'], 'userTask')
         self.assertEqual(dct['form'], {
diff --git a/tests/SpiffWorkflow/dmn/DecisionRunner.py b/tests/SpiffWorkflow/dmn/DecisionRunner.py
index 133f1292..efb9d89b 100644
--- a/tests/SpiffWorkflow/dmn/DecisionRunner.py
+++ b/tests/SpiffWorkflow/dmn/DecisionRunner.py
@@ -2,7 +2,7 @@ import os
 
 from lxml import etree
 
-from SpiffWorkflow.bpmn.PythonScriptEngine import Box
+from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import Box
 
 from SpiffWorkflow.dmn.engine.DMNEngine import DMNEngine
 from SpiffWorkflow.dmn.parser.DMNParser import DMNParser, get_dmn_ns
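The UserTaskSpecTest change above reflects the new converter constructor: task spec converters now take a registry object that brokers typenames for nested values. A sketch of standalone converter use under that reading:

    from SpiffWorkflow.bpmn.serializer.helpers.dictionary import DictionaryConverter
    from SpiffWorkflow.camunda.serializer.task_spec import UserTaskConverter

    # DictionaryConverter is the registry the converter consults while
    # serializing nested form data.
    converter = UserTaskConverter(DictionaryConverter())
    dct = converter.to_dict(user_task_spec)  # user_task_spec: a parsed UserTask spec, assumed in scope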
diff --git a/tests/SpiffWorkflow/dmn/HitPolicyTest.py b/tests/SpiffWorkflow/dmn/HitPolicyTest.py
index 061ba660..898aeeb3 100644
--- a/tests/SpiffWorkflow/dmn/HitPolicyTest.py
+++ b/tests/SpiffWorkflow/dmn/HitPolicyTest.py
@@ -1,14 +1,11 @@
 import os
 import unittest
 
-from SpiffWorkflow.dmn.engine.DMNEngine import DMNEngine
+from SpiffWorkflow.bpmn.serializer.helpers.dictionary import DictionaryConverter
 from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser
-from SpiffWorkflow.dmn.serializer.task_spec_converters import \
-    BusinessRuleTaskConverter
+from SpiffWorkflow.dmn.serializer.task_spec import BusinessRuleTaskConverter
 from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
-from tests.SpiffWorkflow.dmn.DecisionRunner import DecisionRunner
-from tests.SpiffWorkflow.dmn.python_engine.PythonDecisionRunner import \
-    PythonDecisionRunner
+from tests.SpiffWorkflow.dmn.python_engine.PythonDecisionRunner import PythonDecisionRunner
 
 class HitPolicyTest(BpmnWorkflowTestCase):
@@ -38,8 +35,8 @@ class HitPolicyTest(BpmnWorkflowTestCase):
         runner = PythonDecisionRunner(file_name)
         decision_table = runner.decision_table
         self.assertEqual("COLLECT", decision_table.hit_policy)
-        dict = BusinessRuleTaskConverter().decision_table_to_dict(decision_table)
-        new_table = BusinessRuleTaskConverter().decision_table_from_dict(dict)
+        dict = BusinessRuleTaskConverter(DictionaryConverter()).decision_table_to_dict(decision_table)
+        new_table = BusinessRuleTaskConverter(DictionaryConverter()).decision_table_from_dict(dict)
         self.assertEqual("COLLECT", new_table.hit_policy)
 
 def suite():
diff --git a/tests/SpiffWorkflow/dmn/feel_engine/FeelDictDecisionTest.py b/tests/SpiffWorkflow/dmn/feel_engine/FeelDictDecisionTest.py
index 1ab727f9..95c539c8 100644
--- a/tests/SpiffWorkflow/dmn/feel_engine/FeelDictDecisionTest.py
+++ b/tests/SpiffWorkflow/dmn/feel_engine/FeelDictDecisionTest.py
@@ -1,6 +1,6 @@
 import unittest
 
-from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
+from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import Box
 
 from .FeelDecisionRunner import FeelDecisionRunner
 
@@ -19,7 +19,7 @@ class FeelDictDecisionTestClass(unittest.TestCase):
                 "PEANUTS": {"delicious": True},
                 "SPAM": {"delicious": False}
             }}
-        PythonScriptEngine.convert_to_box(PythonScriptEngine(), data)
+        Box.convert_to_box(data)
         res = self.runner.decide(data)
         self.assertEqual(res.description, 'They are allergic to peanuts')
diff --git a/tests/SpiffWorkflow/dmn/feel_engine/FeelDictDotNotationDecisionTest.py b/tests/SpiffWorkflow/dmn/feel_engine/FeelDictDotNotationDecisionTest.py
index bf19b44e..6978fa9f 100644
--- a/tests/SpiffWorkflow/dmn/feel_engine/FeelDictDotNotationDecisionTest.py
+++ b/tests/SpiffWorkflow/dmn/feel_engine/FeelDictDotNotationDecisionTest.py
@@ -1,6 +1,6 @@
 import unittest
 
-from SpiffWorkflow.bpmn.PythonScriptEngine import Box
+from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import Box
 
 from .FeelDecisionRunner import FeelDecisionRunner
 
diff --git a/tests/SpiffWorkflow/dmn/python_engine/DictDotNotationDecisionTest.py b/tests/SpiffWorkflow/dmn/python_engine/DictDotNotationDecisionTest.py
index 375b9fd1..a9e9e2d5 100644
--- a/tests/SpiffWorkflow/dmn/python_engine/DictDotNotationDecisionTest.py
+++ b/tests/SpiffWorkflow/dmn/python_engine/DictDotNotationDecisionTest.py
@@ -1,6 +1,6 @@
 import unittest
 
-from SpiffWorkflow.bpmn.PythonScriptEngine import Box
+from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import Box
 
 from .PythonDecisionRunner import PythonDecisionRunner
 
diff --git a/tests/SpiffWorkflow/dmn/python_engine/DictDotNotationDecisionWeirdCharactersTest.py b/tests/SpiffWorkflow/dmn/python_engine/DictDotNotationDecisionWeirdCharactersTest.py
index d81acb47..df569ccd 100644
--- a/tests/SpiffWorkflow/dmn/python_engine/DictDotNotationDecisionWeirdCharactersTest.py
+++ b/tests/SpiffWorkflow/dmn/python_engine/DictDotNotationDecisionWeirdCharactersTest.py
@@ -1,6 +1,6 @@
 import unittest
 
-from SpiffWorkflow.bpmn.PythonScriptEngine import Box
+from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import Box
 
 from .PythonDecisionRunner import PythonDecisionRunner
 
diff --git a/tests/SpiffWorkflow/dmn/python_engine/PythonDecisionRunner.py b/tests/SpiffWorkflow/dmn/python_engine/PythonDecisionRunner.py
index c3ef77ce..e2e753c6 100644
--- a/tests/SpiffWorkflow/dmn/python_engine/PythonDecisionRunner.py
+++ b/tests/SpiffWorkflow/dmn/python_engine/PythonDecisionRunner.py
@@ -2,11 +2,12 @@ import datetime
 from decimal import Decimal
 
 from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
+from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment
 
 from ..DecisionRunner import DecisionRunner
 
 class PythonDecisionRunner(DecisionRunner):
 
     def __init__(self, filename):
-        scripting_additions={'Decimal': Decimal, 'datetime': datetime}
-        super().__init__(PythonScriptEngine(scripting_additions=scripting_additions), filename, 'python_engine')
+        environment = TaskDataEnvironment({'Decimal': Decimal, 'datetime': datetime})
+        super().__init__(PythonScriptEngine(environment=environment), filename, 'python_engine')
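The Box changes above only relocate the dot-notation wrapper to the environment module; its behaviour is unchanged. A small sketch of what Box.convert_to_box does to nested data, as FeelDictDecisionTest relies on (in-place conversion of the nested dicts is assumed from that test's usage):

    from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import Box

    data = {"foods": {"SPAM": {"delicious": False}}}
    Box.convert_to_box(data)
    # Nested dictionaries become Box instances, so decision tables and
    # scripts can use attribute-style access: foods.SPAM.delicious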
super().__init__(PythonScriptEngine(scripting_additions=scripting_additions), filename, 'python_engine') + environment = TaskDataEnvironment({'Decimal': Decimal, 'datetime': datetime}) + super().__init__(PythonScriptEngine(environment=environment), filename, 'python_engine') diff --git a/tests/SpiffWorkflow/spiff/BaseTestCase.py b/tests/SpiffWorkflow/spiff/BaseTestCase.py index b085d1f7..f1826a78 100644 --- a/tests/SpiffWorkflow/spiff/BaseTestCase.py +++ b/tests/SpiffWorkflow/spiff/BaseTestCase.py @@ -1,27 +1,17 @@ # -*- coding: utf-8 -*- import os +from copy import deepcopy from SpiffWorkflow.spiff.parser.process import SpiffBpmnParser, VALIDATOR -from SpiffWorkflow.spiff.serializer.task_spec_converters import NoneTaskConverter, \ - ManualTaskConverter, UserTaskConverter, ScriptTaskConverter, \ - SubWorkflowTaskConverter, TransactionSubprocessConverter, \ - CallActivityTaskConverter, \ - StartEventConverter, EndEventConverter, BoundaryEventConverter, \ - SendTaskConverter, ReceiveTaskConverter, \ - IntermediateCatchEventConverter, IntermediateThrowEventConverter, \ - ServiceTaskConverter -from SpiffWorkflow.dmn.serializer.task_spec_converters import BusinessRuleTaskConverter +from SpiffWorkflow.spiff.serializer.config import SPIFF_SPEC_CONFIG +from SpiffWorkflow.dmn.serializer.task_spec import BusinessRuleTaskConverter from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase -wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter([ - NoneTaskConverter, ManualTaskConverter, UserTaskConverter, ScriptTaskConverter, - SubWorkflowTaskConverter, TransactionSubprocessConverter, CallActivityTaskConverter, - StartEventConverter, EndEventConverter, BoundaryEventConverter, SendTaskConverter, ReceiveTaskConverter, - IntermediateCatchEventConverter, IntermediateThrowEventConverter, BusinessRuleTaskConverter, - ServiceTaskConverter -]) +SPIFF_SPEC_CONFIG['task_specs'].append(BusinessRuleTaskConverter) + +wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter(SPIFF_SPEC_CONFIG) class BaseTestCase(BpmnWorkflowTestCase): """ Provides some basic tools for loading up and parsing Spiff extensions""" From d71491dd67f8b18934e60905a15d81d7c0a09485 Mon Sep 17 00:00:00 2001 From: burnettk Date: Thu, 2 Feb 2023 21:00:20 -0500 Subject: [PATCH 34/40] remove flask-bpmn --- bin/pull-subtrees | 1 - 1 file changed, 1 deletion(-) diff --git a/bin/pull-subtrees b/bin/pull-subtrees index 8002791b..ee1e988f 100755 --- a/bin/pull-subtrees +++ b/bin/pull-subtrees @@ -10,7 +10,6 @@ set -o errtrace -o errexit -o nounset -o pipefail for subtree in "SpiffWorkflow" \ "spiffworkflow-backend" \ "spiffworkflow-frontend" \ - "flask-bpmn" \ "bpmn-js-spiffworkflow" \ "connector-proxy-demo" do From 58f9d87a00532413f9a4edfc9ec90203d4c500a1 Mon Sep 17 00:00:00 2001 From: burnettk Date: Thu, 2 Feb 2023 21:55:26 -0500 Subject: [PATCH 35/40] import EventBasedGatewayConverter from correct package --- poetry.lock | 2 +- .../services/process_instance_processor.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/poetry.lock b/poetry.lock index 3608303e..2c2711b5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1760,7 +1760,7 @@ lxml = "*" type = "git" url = "https://github.com/sartography/SpiffWorkflow" reference = "main" -resolved_reference = "1f51db962ccaed5810f5d0f7d76a932f056430ab" +resolved_reference = "0e61be85c47474a33037e6f398e64c96e02f13ad" [[package]] name = "sqlalchemy" diff --git 
a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index b45add69..022560c6 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -43,7 +43,7 @@ from SpiffWorkflow.exceptions import WorkflowException # type: ignore from SpiffWorkflow.exceptions import WorkflowTaskException from SpiffWorkflow.serializer.exceptions import MissingSpecError # type: ignore from SpiffWorkflow.spiff.serializer.config import SPIFF_SPEC_CONFIG # type: ignore -from SpiffWorkflow.spiff.serializer.task_spec_converters import ( # type: ignore +from SpiffWorkflow.bpmn.serializer.task_spec import ( # type: ignore EventBasedGatewayConverter, ) from SpiffWorkflow.task import Task as SpiffTask # type: ignore From a31d89b25a840ec9d9d458c0cf5336b83d2288e9 Mon Sep 17 00:00:00 2001 From: burnettk Date: Thu, 2 Feb 2023 22:04:34 -0500 Subject: [PATCH 36/40] couple last serializer updates --- .../services/process_instance_processor.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 022560c6..c9d43f92 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -30,6 +30,9 @@ from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine # type: ig from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import BasePythonScriptEngineEnvironment # type: ignore from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import Box from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import BoxedTaskDataEnvironment +from SpiffWorkflow.bpmn.serializer.task_spec import ( # type: ignore + EventBasedGatewayConverter, +) from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer # type: ignore from SpiffWorkflow.bpmn.specs.BpmnProcessSpec import BpmnProcessSpec # type: ignore from SpiffWorkflow.bpmn.specs.events.EndEvent import EndEvent # type: ignore @@ -43,9 +46,6 @@ from SpiffWorkflow.exceptions import WorkflowException # type: ignore from SpiffWorkflow.exceptions import WorkflowTaskException from SpiffWorkflow.serializer.exceptions import MissingSpecError # type: ignore from SpiffWorkflow.spiff.serializer.config import SPIFF_SPEC_CONFIG # type: ignore -from SpiffWorkflow.bpmn.serializer.task_spec import ( # type: ignore - EventBasedGatewayConverter, -) from SpiffWorkflow.task import Task as SpiffTask # type: ignore from SpiffWorkflow.task import TaskState from SpiffWorkflow.util.deep_merge import DeepMerge # type: ignore @@ -393,7 +393,7 @@ class ProcessInstanceProcessor: SPIFF_SPEC_CONFIG ) _serializer = BpmnWorkflowSerializer(wf_spec_converter, version=SERIALIZER_VERSION) - _event_serializer = EventBasedGatewayConverter() + _event_serializer = EventBasedGatewayConverter(wf_spec_converter) PROCESS_INSTANCE_ID_KEY = "process_instance_id" VALIDATION_PROCESS_KEY = "validate_only" @@ -971,7 +971,7 @@ class ProcessInstanceProcessor: def send_bpmn_event(self, event_data: dict[str, Any]) -> None: """Send an event to the workflow.""" payload = event_data.pop("payload", None) - event_definition = 
self._event_serializer.restore(event_data) + event_definition = self._event_serializer.registry.restore(event_data) if payload is not None: event_definition.payload = payload current_app.logger.info( From b9783ae0edc6206022f8d0f10e3564c6ac22f8b4 Mon Sep 17 00:00:00 2001 From: burnettk Date: Fri, 3 Feb 2023 11:06:40 -0500 Subject: [PATCH 37/40] clean up sentry notification and avoid logger.exception when we do not want sentry --- .../exceptions/api_error.py | 38 ++++++++++++++----- 1 file changed, 28 insertions(+), 10 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py b/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py index ab5bf1c3..46d2ad54 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py @@ -171,18 +171,30 @@ def set_user_sentry_context() -> None: set_tag("username", username) +def should_notify_sentry(exception: Exception) -> bool: + """Determine if we should notify sentry. + + We want to capture_exception to log the exception to sentry, but we don't want to log: + 1. ApiErrors that are just invalid tokens + 2. NotAuthorizedError. we usually call check-permissions before calling an API to + make sure we'll have access, but there are some cases + where it's more convenient to just make the call from the frontend and handle the 403 appropriately. + """ + if isinstance(exception, ApiError): + if exception.error_code == "invalid_token": + return False + if isinstance(exception, NotAuthorizedError): + return False + return True + + @api_error_blueprint.app_errorhandler(Exception) # type: ignore def handle_exception(exception: Exception) -> flask.wrappers.Response: """Handles unexpected exceptions.""" set_user_sentry_context() sentry_link = None - # we want to capture_exception to log the exception to sentry, but we don't want to log: - # 1. ApiErrors that are just invalid tokens - # 2. NotAuthorizedError - if ( - not isinstance(exception, ApiError) or exception.error_code != "invalid_token" - ) and not isinstance(exception, NotAuthorizedError): + if should_notify_sentry(exception): id = capture_exception(exception) if isinstance(exception, ApiError): @@ -198,10 +210,16 @@ def handle_exception(exception: Exception) -> flask.wrappers.Response: f"https://sentry.io/{organization_slug}/{project_slug}/events/{id}" ) - # !!!NOTE!!!: do this after sentry stuff since calling logger.exception - # seems to break the sentry sdk context where we no longer get back - # an event id or send out tags like username - current_app.logger.exception(exception) + # !!!NOTE!!!: do this after sentry stuff since calling logger.exception + # seems to break the sentry sdk context where we no longer get back + # an event id or send out tags like username + current_app.logger.exception(exception) + else: + current_app.logger.error( + f"Received exception: {exception}. Since we do not want this particular" + " exception in sentry, we cannot use logger.exception, so there will be no" + " backtrace. 
see api_error.py" + ) error_code = "internal_server_error" status_code = 500 From 18070c5be32a9eec99d0b754f6cf7503299d5080 Mon Sep 17 00:00:00 2001 From: burnettk Date: Fri, 3 Feb 2023 12:51:57 -0500 Subject: [PATCH 38/40] make test_user_lists more complete and correct --- .../keycloak/bin/add_test_users_to_keycloak | 6 +++ .../realm_exports/sartography-realm.json | 2 +- .../keycloak/test_user_lists/sartography | 9 ++-- .../keycloak/test_user_lists/status | 45 ++++++++++++------- 4 files changed, 42 insertions(+), 20 deletions(-) diff --git a/spiffworkflow-backend/keycloak/bin/add_test_users_to_keycloak b/spiffworkflow-backend/keycloak/bin/add_test_users_to_keycloak index 5ad11e13..9a045ffe 100755 --- a/spiffworkflow-backend/keycloak/bin/add_test_users_to_keycloak +++ b/spiffworkflow-backend/keycloak/bin/add_test_users_to_keycloak @@ -7,7 +7,13 @@ function error_handler() { trap 'error_handler ${LINENO} $?' ERR set -o errtrace -o errexit -o nounset -o pipefail +# you can get a list of users from the keycloak realm file like: +# grep '"email" :' keycloak/realm_exports/spiffworkflow-realm.json | awk -F : '{print $2}' | sed -E 's/ "//g' | sed -E 's/",//g' > s + +# we keep some of these in keycloak/test_user_lists +# spiffworkflow-realm.json is a mashup of the status and sartography user lists. user_file_with_one_email_per_line="${1:-}" + keycloak_realm="${2:-spiffworkflow}" if [[ -z "${1:-}" ]]; then >&2 echo "usage: $(basename "$0") [user_file_with_one_email_per_line]" diff --git a/spiffworkflow-backend/keycloak/realm_exports/sartography-realm.json b/spiffworkflow-backend/keycloak/realm_exports/sartography-realm.json index 37704ea5..20c19e24 100644 --- a/spiffworkflow-backend/keycloak/realm_exports/sartography-realm.json +++ b/spiffworkflow-backend/keycloak/realm_exports/sartography-realm.json @@ -547,7 +547,7 @@ "enabled" : true, "totp" : false, "emailVerified" : false, - "email" : "kevin@sartography.com", + "email" : "kb@sartography.com", "credentials" : [ { "id" : "4057e784-689d-47c0-a164-035a69e78edf", "type" : "password", diff --git a/spiffworkflow-backend/keycloak/test_user_lists/sartography b/spiffworkflow-backend/keycloak/test_user_lists/sartography index 4f98a51e..b6f685b8 100644 --- a/spiffworkflow-backend/keycloak/test_user_lists/sartography +++ b/spiffworkflow-backend/keycloak/test_user_lists/sartography @@ -1,8 +1,11 @@ +admin@spiffworkflow.org alex@sartography.com dan@sartography.com -kevin@sartography.com -jason@sartography.com -mike@sartography.com +daniel@sartography.com elizabeth@sartography.com +jason@sartography.com jon@sartography.com +kb@sartography.com +madhurya@sartography.com +mike@sartography.com natalia@sartography.com diff --git a/spiffworkflow-backend/keycloak/test_user_lists/status b/spiffworkflow-backend/keycloak/test_user_lists/status index 667c4f03..cb510747 100644 --- a/spiffworkflow-backend/keycloak/test_user_lists/status +++ b/spiffworkflow-backend/keycloak/test_user_lists/status @@ -1,25 +1,38 @@ +admin@spiffworkflow.org +amir@status.im +app.program.lead@status.im +core@status.im +dao.project.lead@status.im +desktop.program.lead@status.im +desktop.project.lead@status.im +fin1@status.im +fin@status.im finance.lead@status.im -legal.lead@status.im -program.lead@status.im -services.lead@status.im finance.sme@status.im -infra.sme@status.im -legal.sme@status.im -security.sme@status.im -ppg.ba@status.im -peopleops.partner@status.im -peopleops.talent@status.im +finance_user1@status.im +harmeet@status.im infra.program-lead@status.im infra.project-lead@status.im 
-dao.project.lead@status.im -desktop.project.lead@status.im -app.program.lead@status.im -desktop.program.lead@status.im -legal.program-lead.sme@status.im -legal.project-lead.sme@status.im -legal1.sme@status.im +infra.sme@status.im infra1.sme@status.im infra2.sme@status.im +jakub@status.im +jarrad@status.im +lead1@status.im +lead@status.im +legal.lead@status.im +legal.program-lead.sme@status.im +legal.project-lead.sme@status.im +legal.sme@status.im +legal1.sme@status.im +manuchehr@status.im +peopleops.partner@status.im +peopleops.talent@status.im +ppg.ba@status.im +program.lead@status.im +sasha@status.im security.program-lead.sme@status.im security.project-lead.sme@status.im +security.sme@status.im security1.sme@status.im +services.lead@status.im From b782c3faa763569f059b7e2189bd34b029f8258e Mon Sep 17 00:00:00 2001 From: burnettk Date: Fri, 3 Feb 2023 13:02:50 -0500 Subject: [PATCH 39/40] remove service accounts, formalize j, add madhurya --- .../keycloak/bin/export_keycloak_realms | 3 + .../realm_exports/spiffworkflow-realm.json | 106 ++++++++---------- .../keycloak/test_user_lists/sartography | 2 + 3 files changed, 50 insertions(+), 61 deletions(-) diff --git a/spiffworkflow-backend/keycloak/bin/export_keycloak_realms b/spiffworkflow-backend/keycloak/bin/export_keycloak_realms index f205d0d7..7e55ae6f 100755 --- a/spiffworkflow-backend/keycloak/bin/export_keycloak_realms +++ b/spiffworkflow-backend/keycloak/bin/export_keycloak_realms @@ -21,6 +21,9 @@ docker exec keycloak /opt/keycloak/bin/kc.sh export --dir "${docker_container_pa docker cp "keycloak:${docker_container_path}" "$local_tmp_dir" for realm in $realms ; do + if ! grep -Eq '\-realm$' <<< "$realm"; then + realm="${realm}-realm" + fi cp "${local_tmp_dir}/hey/${realm}.json" "${script_dir}/../realm_exports/" done diff --git a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json index 634caef7..c81e57ad 100644 --- a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json +++ b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json @@ -903,7 +903,7 @@ "emailVerified" : false, "firstName" : "", "lastName" : "", - "email" : "j@status.im", + "email" : "j@sartography.com", "credentials" : [ { "id" : "e71ec785-9133-4b7d-8015-1978379af0bb", "type" : "password", @@ -1163,6 +1163,26 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] + }, { + "id" : "99ce8a54-2941-4767-8ddf-52320b3708bd", + "createdTimestamp" : 1675447085191, + "username" : "madhurya", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "madhurya@sartography.com", + "credentials" : [ { + "id" : "4fa2bf1f-188e-42e3-9633-01d436864206", + "type" : "password", + "createdDate" : 1675447085252, + "secretData" : "{\"value\":\"6ZApQ7kx4YDc5ojW9eyFiSKMz5l3/Zl5PIScHEW1gtP3lrnnWqWgwcP+8cWkKdm3im+XrZwDQHjuGjGN5Rbjyw==\",\"salt\":\"HT3fCh245v8etRFIprXsyw==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] }, { "id" : "6f5bfa09-7494-4a2f-b871-cf327048cac7", "createdTimestamp" : 1665517010600, @@ -1405,42 +1425,6 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] - }, { - "id" : "487d3a85-89dd-4839-957a-c3f6d70551f6", - 
"createdTimestamp" : 1657115173081, - "username" : "service-account-spiffworkflow-backend", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "service-account@status.im", - "serviceAccountClientId" : "spiffworkflow-backend", - "credentials" : [ ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "clientRoles" : { - "spiffworkflow-backend" : [ "uma_protection" ] - }, - "notBefore" : 0, - "groups" : [ ] - }, { - "id" : "22de68b1-4b06-4bc2-8da6-0c577e7e62ad", - "createdTimestamp" : 1657055472800, - "username" : "service-account-withauth", - "enabled" : true, - "totp" : false, - "emailVerified" : false, - "email" : "service-account-withauth@status.im", - "serviceAccountClientId" : "withAuth", - "credentials" : [ ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "clientRoles" : { - "withAuth" : [ "uma_protection" ] - }, - "notBefore" : 0, - "groups" : [ ] }, { "id" : "3d45bb85-0a2d-4b15-8a19-d26a5619d359", "createdTimestamp" : 1674148694810, @@ -2674,7 +2658,7 @@ "subType" : "authenticated", "subComponents" : { }, "config" : { - "allowed-protocol-mapper-types" : [ "oidc-address-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-role-list-mapper", "oidc-usermodel-attribute-mapper", "saml-user-property-mapper", "saml-user-attribute-mapper", "oidc-usermodel-property-mapper", "oidc-full-name-mapper" ] + "allowed-protocol-mapper-types" : [ "oidc-usermodel-property-mapper", "saml-role-list-mapper", "saml-user-attribute-mapper", "oidc-address-mapper", "saml-user-property-mapper", "oidc-usermodel-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-full-name-mapper" ] } }, { "id" : "d68e938d-dde6-47d9-bdc8-8e8523eb08cd", @@ -2692,7 +2676,7 @@ "subType" : "anonymous", "subComponents" : { }, "config" : { - "allowed-protocol-mapper-types" : [ "saml-role-list-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-attribute-mapper", "saml-user-attribute-mapper", "oidc-full-name-mapper", "saml-user-property-mapper", "oidc-address-mapper", "oidc-usermodel-property-mapper" ] + "allowed-protocol-mapper-types" : [ "saml-role-list-mapper", "saml-user-property-mapper", "oidc-usermodel-property-mapper", "oidc-full-name-mapper", "oidc-usermodel-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-address-mapper", "saml-user-attribute-mapper" ] } }, { "id" : "3854361d-3fe5-47fb-9417-a99592e3dc5c", @@ -2782,7 +2766,7 @@ "internationalizationEnabled" : false, "supportedLocales" : [ ], "authenticationFlows" : [ { - "id" : "feafc299-fede-4880-9e23-eb81aca22808", + "id" : "8facbab5-bca2-42c6-8608-ed94dacefe92", "alias" : "Account verification options", "description" : "Method with which to verity the existing account", "providerId" : "basic-flow", @@ -2804,7 +2788,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "ce7904d0-9182-49a2-aa71-a7b43e21f3ac", + "id" : "be52bd38-2def-41e7-a021-69bae78e92b7", "alias" : "Authentication Options", "description" : "Authentication options.", "providerId" : "basic-flow", @@ -2833,7 +2817,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "d9c6909a-5cc1-4ddf-b297-dbfcf6e609a6", + "id" : "ee18f6d1-9ca3-4535-a7a0-9759f3841513", "alias" : "Browser - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -2855,7 +2839,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "083a589e-a486-42b6-ae73-1ec983967ff5", + "id" : 
"c76481eb-7997-4231-abac-632afd97631f", "alias" : "Direct Grant - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -2877,7 +2861,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "7f0248b0-2d51-4175-9fd2-52b606a39e26", + "id" : "14fe94d2-f3ef-4349-9cbe-79921c013108", "alias" : "First broker login - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -2899,7 +2883,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "44465f1f-c700-4ec0-a234-d95c994c9e25", + "id" : "533c45e3-10d9-480b-9c9b-c2f746fb6f66", "alias" : "Handle Existing Account", "description" : "Handle what to do if there is existing account with same email/username like authenticated identity provider", "providerId" : "basic-flow", @@ -2921,7 +2905,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "8cf09055-5b98-4fc8-b867-3dffacdec21b", + "id" : "1161d043-26ba-420c-baed-b220bcef40f1", "alias" : "Reset - Conditional OTP", "description" : "Flow to determine if the OTP should be reset or not. Set to REQUIRED to force.", "providerId" : "basic-flow", @@ -2943,7 +2927,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "16b50b3e-4240-4f49-a85e-1bfd40def300", + "id" : "cbba8afb-920f-4ae0-85f3-6bc520485dc2", "alias" : "User creation or linking", "description" : "Flow for the existing/non-existing user alternatives", "providerId" : "basic-flow", @@ -2966,7 +2950,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "2aa981ae-d67e-49fb-95a4-91de1e5ab724", + "id" : "7b349cd1-fb1c-4d04-b5b5-885352277562", "alias" : "Verify Existing Account by Re-authentication", "description" : "Reauthentication of existing account", "providerId" : "basic-flow", @@ -2988,7 +2972,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "cf8406f7-09c3-4614-a898-99c9d66746f6", + "id" : "de10b07d-98b5-483c-b193-b1b93229478f", "alias" : "browser", "description" : "browser based authentication", "providerId" : "basic-flow", @@ -3024,7 +3008,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "e1ec7d6e-7612-4c5b-afce-c7f4fddbf6ec", + "id" : "4504d37b-3a2d-4cc9-b300-29482d86c72e", "alias" : "clients", "description" : "Base authentication for clients", "providerId" : "client-flow", @@ -3060,7 +3044,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "f5862b09-6e01-4c88-b44e-26dc59d71b80", + "id" : "9d86bdff-ba8e-433a-8536-a49c0af5faf2", "alias" : "direct grant", "description" : "OpenID Connect Resource Owner Grant", "providerId" : "basic-flow", @@ -3089,7 +3073,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "7caa8611-8b13-437e-83b2-556899b5444f", + "id" : "546d31fc-a885-46eb-94bd-171d04f16a7c", "alias" : "docker auth", "description" : "Used by Docker clients to authenticate against the IDP", "providerId" : "basic-flow", @@ -3104,7 +3088,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "91d40deb-344f-4e0b-a845-98b2fc4a633a", + "id" : "70e5d629-4338-4aec-8671-fc7cf4c450b1", "alias" : "first broker login", "description" : "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account", "providerId" : "basic-flow", @@ -3127,7 +3111,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "f221b5e6-1bcc-4b37-ba61-4d3bc6a30a8b", + "id" : "7213dc19-6e0b-4241-bef6-2409346a2745", "alias" : "forms", "description" : "Username, password, otp and other auth forms.", "providerId" : "basic-flow", @@ -3149,7 +3133,7 @@ "userSetupAllowed" : false } ] }, { - "id" : 
"3ed8e597-19af-4ec8-b532-a97311f52de3", + "id" : "f91a8499-8cf5-408c-b85d-40e85a3f6ee3", "alias" : "http challenge", "description" : "An authentication flow based on challenge-response HTTP Authentication Schemes", "providerId" : "basic-flow", @@ -3171,7 +3155,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "3970fd16-3786-4eb3-9efe-453d0984b18b", + "id" : "9ec3751c-619e-4edc-a14f-4ac9c60b056f", "alias" : "registration", "description" : "registration flow", "providerId" : "basic-flow", @@ -3187,7 +3171,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "e26b27b4-c957-491c-bb6d-9d226b22399c", + "id" : "8048e711-8e77-4b85-8b26-243948a7c2f4", "alias" : "registration form", "description" : "registration form", "providerId" : "form-flow", @@ -3223,7 +3207,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "3ae37429-a623-42e3-a4a1-f9586b96b730", + "id" : "5a08de49-dd24-4e53-a656-9fac52fc6d2b", "alias" : "reset credentials", "description" : "Reset credentials for a user if they forgot their password or something", "providerId" : "basic-flow", @@ -3259,7 +3243,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "7606ecd5-eb13-4aee-bd9f-3ec4ce77c59c", + "id" : "42bc970f-3ee5-429c-a543-e8078808d371", "alias" : "saml ecp", "description" : "SAML ECP Profile Authentication Flow", "providerId" : "basic-flow", @@ -3275,13 +3259,13 @@ } ] } ], "authenticatorConfig" : [ { - "id" : "058b3c89-4ea4-43fa-b337-e523b1d93ec3", + "id" : "23f4f930-3290-4a63-ac96-f7ddc04fbce2", "alias" : "create unique user config", "config" : { "require.password.update.after.registration" : "false" } }, { - "id" : "21410ac7-4b82-4f19-aae2-43ac33ba3f8f", + "id" : "4cfa7fa4-1a9b-4464-9510-460208e345eb", "alias" : "review profile config", "config" : { "update.profile.on.first.login" : "missing" diff --git a/spiffworkflow-backend/keycloak/test_user_lists/sartography b/spiffworkflow-backend/keycloak/test_user_lists/sartography index b6f685b8..1b7166bb 100644 --- a/spiffworkflow-backend/keycloak/test_user_lists/sartography +++ b/spiffworkflow-backend/keycloak/test_user_lists/sartography @@ -3,9 +3,11 @@ alex@sartography.com dan@sartography.com daniel@sartography.com elizabeth@sartography.com +j@sartography.com jason@sartography.com jon@sartography.com kb@sartography.com +kevin@sartography.com madhurya@sartography.com mike@sartography.com natalia@sartography.com From b481de0a61fba4dbfd5b894e7bb60f9faa04cf03 Mon Sep 17 00:00:00 2001 From: burnettk Date: Fri, 3 Feb 2023 13:11:39 -0500 Subject: [PATCH 40/40] add more users, and try to prevent sentry notification again --- .../realm_exports/spiffworkflow-realm.json | 244 ++++++++++++++++-- .../keycloak/test_user_lists/status | 8 + .../exceptions/api_error.py | 6 +- 3 files changed, 230 insertions(+), 28 deletions(-) diff --git a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json index c81e57ad..722f1276 100644 --- a/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json +++ b/spiffworkflow-backend/keycloak/realm_exports/spiffworkflow-realm.json @@ -1083,6 +1083,26 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] + }, { + "id" : "e911fb0f-fd07-4886-acbf-d00930d293d3", + "createdTimestamp" : 1675447845512, + "username" : "legal.program-lead", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "legal.program-lead@status.im", + "credentials" : [ { + "id" : "9676d8d3-1e8c-4f5d-b5f7-49745cecf8fd", + "type" : "password", + "createdDate" 
: 1675447845577, + "secretData" : "{\"value\":\"vTffScfGXIjWWyDDfzo7JPiJe9VjAtrmds382EeV7N+wYNapJmLTVModkBsmGPy4TmWLc9BoysQynOaanSGi9Q==\",\"salt\":\"67ZxTEnar8aq4LZLhSNTFg==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] }, { "id" : "eff82d12-9a67-4002-b3c5-37811bd45199", "createdTimestamp" : 1675349217585, @@ -1103,6 +1123,26 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] + }, { + "id" : "4ed2b5a2-16c2-4029-ae97-d75c60f2147f", + "createdTimestamp" : 1675447845616, + "username" : "legal.project-lead", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "legal.project-lead@status.im", + "credentials" : [ { + "id" : "fd0b0d0a-8a3e-48c9-b17b-023e87057048", + "type" : "password", + "createdDate" : 1675447845652, + "secretData" : "{\"value\":\"l/DPfNBcHINV8lCf9nEyCJkFvaMGnLqcd1Y8t9taLqxb8r/ofY2ce79C19JCHDQJXRPRuCsMoobuFhhNR6aQmg==\",\"salt\":\"2ivCPrNc56396ldlwpQP6Q==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] }, { "id" : "8cd6feba-5ca6-4cfb-bc1a-a52c80595783", "createdTimestamp" : 1675349217698, @@ -1305,6 +1345,86 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] + }, { + "id" : "9f703c96-02f1-403c-b070-25feb86cfe21", + "createdTimestamp" : 1675447845811, + "username" : "ppg.ba.program-lead", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "ppg.ba.program-lead@status.im", + "credentials" : [ { + "id" : "bf74118b-b28f-4d2f-8bfa-7b9d1a8345f2", + "type" : "password", + "createdDate" : 1675447845847, + "secretData" : "{\"value\":\"wFUAB6E98gE222nCfsKe6P3kSZxeOSjhflsxon8kw/dY4ZwN0KMwvlYuNhmoptTLqDQJyqUiydmlMK0NS4JjTQ==\",\"salt\":\"YCPk4Tc3eXcoes78oLhDEg==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] + }, { + "id" : "81a1727b-c846-4af9-8d95-1c50b1deb0d5", + "createdTimestamp" : 1675447845879, + "username" : "ppg.ba.project-lead", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "ppg.ba.project-lead@status.im", + "credentials" : [ { + "id" : "6411830d-6015-4cf2-bac6-d49c26510319", + "type" : "password", + "createdDate" : 1675447845915, + "secretData" : "{\"value\":\"1+m8twycOEbA4X61zN7dLENqp2IxxQZrXKaf3mEuzmxouHrgxvmXudwC6DWyfjXvLm7gxWlaa4cofBFwr1idig==\",\"salt\":\"UEKUSScYv2xY+rJ8vlvF4A==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] + }, { + "id" : "1d4d471a-b3ef-4750-97c4-a9e64eb8f414", + "createdTimestamp" : 1675447845942, + "username" : "ppg.ba.sme", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : 
"ppg.ba.sme@status.im", + "credentials" : [ { + "id" : "6512f88a-cbcc-4d79-be17-1d132ba11e64", + "type" : "password", + "createdDate" : 1675447845977, + "secretData" : "{\"value\":\"EErx/3vG+lh4DgrJUzkBv4cLT3sK1gS+T9KD5V/JpvJUmJpRFQqpk+YxC/nC/kTGLIpRDdCIN690T84FlOIjew==\",\"salt\":\"FPeVGnFbt9TRNiORMB5LMQ==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] + }, { + "id" : "2dade29f-c6dc-445b-bdf0-eed316bdb638", + "createdTimestamp" : 1675447846003, + "username" : "ppg.ba.sme1", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "ppg.ba.sme1@status.im", + "credentials" : [ { + "id" : "ccf2d138-020a-4a29-b63d-1f4d2f415639", + "type" : "password", + "createdDate" : 1675447846038, + "secretData" : "{\"value\":\"BtSJtW/8lCtyrDPTXzhsyT/32H+pOHx9thKqJV30dOEZ9wcSQbrRSHoQbXwLos+sIiA82X3wm+qObdQoD5guVQ==\",\"salt\":\"nSbgxYpVGaMz2ArmqLCN6Q==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] }, { "id" : "c3ea06ee-c497-48e6-8816-43c8ef68bd8b", "createdTimestamp" : 1674148694747, @@ -1345,6 +1465,26 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] + }, { + "id" : "c21c075d-9ac5-40a1-964a-c1d6ffe17257", + "createdTimestamp" : 1675447845680, + "username" : "security.program-lead", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "security.program-lead@status.im", + "credentials" : [ { + "id" : "d1401dbd-a88b-44a6-b13c-fff13ee07e0c", + "type" : "password", + "createdDate" : 1675447845718, + "secretData" : "{\"value\":\"3D76RpIFG0/ixbSBeJfCc61kyL8PvVn/khA8FOy6RLg2hrZbs1Uwl8SmplnSUll1wD5a/BoobsO7v1XW4TCvwQ==\",\"salt\":\"YtDRRmBV4SBlO/oX23r2EQ==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] }, { "id" : "ace0432f-1818-4210-8bcf-15533abfb3ce", "createdTimestamp" : 1675349217958, @@ -1365,6 +1505,26 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] + }, { + "id" : "34dfacfd-24b5-414e-ac3e-9b013399aee2", + "createdTimestamp" : 1675447845747, + "username" : "security.project-lead", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "email" : "security.project-lead@status.im", + "credentials" : [ { + "id" : "cb5d8a8a-e7d0-40e4-878b-a33608cb76c8", + "type" : "password", + "createdDate" : 1675447845784, + "secretData" : "{\"value\":\"rudimVOjVwJeO/1RLuyHySEaSQMzjHqPQrh5Pmfr4L2PgP/1oDKLVB38pKOohlbTarDcbAfMHB7AFYAPn9kuIg==\",\"salt\":\"cOkkUBOx/4AVUSa3Ozsiuw==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "notBefore" : 0, + "groups" : [ ] }, { "id" : "6272ac80-1d79-4e3c-a5c1-b31660560318", "createdTimestamp" : 
1675349218020, @@ -1425,6 +1585,40 @@ "realmRoles" : [ "default-roles-spiffworkflow" ], "notBefore" : 0, "groups" : [ ] + }, { + "id" : "b768e3ef-f905-4493-976c-bc3408c04bec", + "createdTimestamp" : 1675447832524, + "username" : "service-account-spiffworkflow-backend", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "serviceAccountClientId" : "spiffworkflow-backend", + "credentials" : [ ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "clientRoles" : { + "spiffworkflow-backend" : [ "uma_protection" ] + }, + "notBefore" : 0, + "groups" : [ ] + }, { + "id" : "b6fb214b-cb8a-4403-9308-ac6d4e13ef26", + "createdTimestamp" : 1675447832560, + "username" : "service-account-withauth", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "serviceAccountClientId" : "withAuth", + "credentials" : [ ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-spiffworkflow" ], + "clientRoles" : { + "withAuth" : [ "uma_protection" ] + }, + "notBefore" : 0, + "groups" : [ ] }, { "id" : "3d45bb85-0a2d-4b15-8a19-d26a5619d359", "createdTimestamp" : 1674148694810, @@ -2658,7 +2852,7 @@ "subType" : "authenticated", "subComponents" : { }, "config" : { - "allowed-protocol-mapper-types" : [ "oidc-usermodel-property-mapper", "saml-role-list-mapper", "saml-user-attribute-mapper", "oidc-address-mapper", "saml-user-property-mapper", "oidc-usermodel-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-full-name-mapper" ] + "allowed-protocol-mapper-types" : [ "oidc-usermodel-property-mapper", "saml-user-property-mapper", "saml-user-attribute-mapper", "oidc-usermodel-attribute-mapper", "oidc-address-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-role-list-mapper", "oidc-full-name-mapper" ] } }, { "id" : "d68e938d-dde6-47d9-bdc8-8e8523eb08cd", @@ -2676,7 +2870,7 @@ "subType" : "anonymous", "subComponents" : { }, "config" : { - "allowed-protocol-mapper-types" : [ "saml-role-list-mapper", "saml-user-property-mapper", "oidc-usermodel-property-mapper", "oidc-full-name-mapper", "oidc-usermodel-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-address-mapper", "saml-user-attribute-mapper" ] + "allowed-protocol-mapper-types" : [ "oidc-full-name-mapper", "saml-role-list-mapper", "saml-user-attribute-mapper", "oidc-usermodel-attribute-mapper", "oidc-address-mapper", "saml-user-property-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-property-mapper" ] } }, { "id" : "3854361d-3fe5-47fb-9417-a99592e3dc5c", @@ -2766,7 +2960,7 @@ "internationalizationEnabled" : false, "supportedLocales" : [ ], "authenticationFlows" : [ { - "id" : "8facbab5-bca2-42c6-8608-ed94dacefe92", + "id" : "cb39eda2-18c2-4b03-9d7c-672a2bd47d19", "alias" : "Account verification options", "description" : "Method with which to verity the existing account", "providerId" : "basic-flow", @@ -2788,7 +2982,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "be52bd38-2def-41e7-a021-69bae78e92b7", + "id" : "96d4e28f-51ad-4737-87b4-5a10484ceb8b", "alias" : "Authentication Options", "description" : "Authentication options.", "providerId" : "basic-flow", @@ -2817,7 +3011,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "ee18f6d1-9ca3-4535-a7a0-9759f3841513", + "id" : "8f4c884d-93cd-4404-bc3a-1fa717b070c5", "alias" : "Browser - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -2839,7 +3033,7 @@ "userSetupAllowed" : 
false } ] }, { - "id" : "c76481eb-7997-4231-abac-632afd97631f", + "id" : "166d1879-dd61-4fb4-b4f6-0a4d69f49da8", "alias" : "Direct Grant - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -2861,7 +3055,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "14fe94d2-f3ef-4349-9cbe-79921c013108", + "id" : "18cab8f9-f010-4226-a86e-8da2f1632304", "alias" : "First broker login - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -2883,7 +3077,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "533c45e3-10d9-480b-9c9b-c2f746fb6f66", + "id" : "04d8d1d1-5253-4644-b55d-8c9317818b33", "alias" : "Handle Existing Account", "description" : "Handle what to do if there is existing account with same email/username like authenticated identity provider", "providerId" : "basic-flow", @@ -2905,7 +3099,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "1161d043-26ba-420c-baed-b220bcef40f1", + "id" : "2bf21e1d-ff7e-4d52-8be7-31355945c302", "alias" : "Reset - Conditional OTP", "description" : "Flow to determine if the OTP should be reset or not. Set to REQUIRED to force.", "providerId" : "basic-flow", @@ -2927,7 +3121,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "cbba8afb-920f-4ae0-85f3-6bc520485dc2", + "id" : "fa8636a5-9969-41a5-9fef-9c825cceb819", "alias" : "User creation or linking", "description" : "Flow for the existing/non-existing user alternatives", "providerId" : "basic-flow", @@ -2950,7 +3144,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "7b349cd1-fb1c-4d04-b5b5-885352277562", + "id" : "8656a884-6645-40b5-b075-c40736e27811", "alias" : "Verify Existing Account by Re-authentication", "description" : "Reauthentication of existing account", "providerId" : "basic-flow", @@ -2972,7 +3166,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "de10b07d-98b5-483c-b193-b1b93229478f", + "id" : "0d88d334-bfa4-4cf1-9fa3-17d0df0151d1", "alias" : "browser", "description" : "browser based authentication", "providerId" : "basic-flow", @@ -3008,7 +3202,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "4504d37b-3a2d-4cc9-b300-29482d86c72e", + "id" : "9b195d67-e3e6-4983-8607-533b739ebd97", "alias" : "clients", "description" : "Base authentication for clients", "providerId" : "client-flow", @@ -3044,7 +3238,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "9d86bdff-ba8e-433a-8536-a49c0af5faf2", + "id" : "fd0273a1-f6f4-4df1-a057-54ac4e91f4a9", "alias" : "direct grant", "description" : "OpenID Connect Resource Owner Grant", "providerId" : "basic-flow", @@ -3073,7 +3267,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "546d31fc-a885-46eb-94bd-171d04f16a7c", + "id" : "b457cba8-ef31-473b-a481-c095b2f4eb48", "alias" : "docker auth", "description" : "Used by Docker clients to authenticate against the IDP", "providerId" : "basic-flow", @@ -3088,7 +3282,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "70e5d629-4338-4aec-8671-fc7cf4c450b1", + "id" : "97519504-fd69-4c08-bd27-15d26fbc9b76", "alias" : "first broker login", "description" : "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account", "providerId" : "basic-flow", @@ -3111,7 +3305,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "7213dc19-6e0b-4241-bef6-2409346a2745", + "id" : "fc6a4468-1a78-410d-ac97-cf9f05814850", "alias" : "forms", "description" : "Username, password, otp and other auth forms.", "providerId" : "basic-flow", @@ -3133,7 
+3327,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "f91a8499-8cf5-408c-b85d-40e85a3f6ee3", + "id" : "97a25d8a-25a0-4bf4-be6d-a6f019cf3a32", "alias" : "http challenge", "description" : "An authentication flow based on challenge-response HTTP Authentication Schemes", "providerId" : "basic-flow", @@ -3155,7 +3349,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "9ec3751c-619e-4edc-a14f-4ac9c60b056f", + "id" : "671e8ec7-af31-4c54-b6bb-96ebe69881de", "alias" : "registration", "description" : "registration flow", "providerId" : "basic-flow", @@ -3171,7 +3365,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "8048e711-8e77-4b85-8b26-243948a7c2f4", + "id" : "24d6aaaa-5202-4401-99c3-bb15925bd5be", "alias" : "registration form", "description" : "registration form", "providerId" : "form-flow", @@ -3207,7 +3401,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "5a08de49-dd24-4e53-a656-9fac52fc6d2b", + "id" : "f948bd43-ff05-4245-be30-a0a0dad2b7f0", "alias" : "reset credentials", "description" : "Reset credentials for a user if they forgot their password or something", "providerId" : "basic-flow", @@ -3243,7 +3437,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "42bc970f-3ee5-429c-a543-e8078808d371", + "id" : "7e4aaea7-05ca-4aa0-b934-4c81614620a8", "alias" : "saml ecp", "description" : "SAML ECP Profile Authentication Flow", "providerId" : "basic-flow", @@ -3259,13 +3453,13 @@ } ] } ], "authenticatorConfig" : [ { - "id" : "23f4f930-3290-4a63-ac96-f7ddc04fbce2", + "id" : "14ca1058-25e7-41f6-85ce-ad0bfce2c67c", "alias" : "create unique user config", "config" : { "require.password.update.after.registration" : "false" } }, { - "id" : "4cfa7fa4-1a9b-4464-9510-460208e345eb", + "id" : "16803de1-f7dc-4293-acde-fd0eae264377", "alias" : "review profile config", "config" : { "update.profile.on.first.login" : "missing" @@ -3360,4 +3554,4 @@ "clientPolicies" : { "policies" : [ ] } -} +} \ No newline at end of file diff --git a/spiffworkflow-backend/keycloak/test_user_lists/status b/spiffworkflow-backend/keycloak/test_user_lists/status index cb510747..66da936e 100644 --- a/spiffworkflow-backend/keycloak/test_user_lists/status +++ b/spiffworkflow-backend/keycloak/test_user_lists/status @@ -22,17 +22,25 @@ lead1@status.im lead@status.im legal.lead@status.im legal.program-lead.sme@status.im +legal.program-lead@status.im legal.project-lead.sme@status.im +legal.project-lead@status.im legal.sme@status.im legal1.sme@status.im manuchehr@status.im peopleops.partner@status.im peopleops.talent@status.im +ppg.ba.program-lead@status.im +ppg.ba.project-lead@status.im +ppg.ba.sme1@status.im +ppg.ba.sme@status.im ppg.ba@status.im program.lead@status.im sasha@status.im security.program-lead.sme@status.im +security.program-lead@status.im security.project-lead.sme@status.im +security.project-lead@status.im security.sme@status.im security1.sme@status.im services.lead@status.im diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py b/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py index 46d2ad54..886e138e 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py @@ -215,10 +215,10 @@ def handle_exception(exception: Exception) -> flask.wrappers.Response: # an event id or send out tags like username current_app.logger.exception(exception) else: - current_app.logger.error( + current_app.logger.warning( f"Received exception: {exception}. 
Since we do not want this particular" " exception in sentry, we cannot use logger.exception or logger.error, so" " there will be no backtrace. See api_error.py." ) error_code = "internal_server_error"
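A note on the serializer API migration threaded through the patches above: converter classes moved out of task_spec_converters into per-package task_spec modules, converters are now constructed against a shared registry (a DictionaryConverter or the workflow spec converter) rather than bare, and restore() is reached through that registry. Below is a minimal sketch of the new wiring, assuming the SpiffWorkflow revision pinned in poetry.lock above; module paths and call signatures are taken from these diffs, SERIALIZER_VERSION is a stand-in for whatever version string the application pins, and event_data is a hypothetical serialized event dict.

    from decimal import Decimal

    from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
    from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment
    from SpiffWorkflow.bpmn.serializer.task_spec import EventBasedGatewayConverter
    from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer
    from SpiffWorkflow.dmn.serializer.task_spec import BusinessRuleTaskConverter
    from SpiffWorkflow.spiff.serializer.config import SPIFF_SPEC_CONFIG

    SERIALIZER_VERSION = "1.0"  # stand-in; use the version your application pins

    # DMN support is opt-in: append its converter to the stock spec config
    # before building the shared registry, as BaseTestCase.py does above.
    SPIFF_SPEC_CONFIG['task_specs'].append(BusinessRuleTaskConverter)
    wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter(SPIFF_SPEC_CONFIG)
    serializer = BpmnWorkflowSerializer(wf_spec_converter, version=SERIALIZER_VERSION)

    # Event converters take the shared registry, and restore() lives on
    # converter.registry rather than on the converter itself.
    event_serializer = EventBasedGatewayConverter(wf_spec_converter)
    # event_definition = event_serializer.registry.restore(event_data)  # event_data: hypothetical dict

    # Script engines take an environment object instead of scripting_additions,
    # as PythonDecisionRunner.py does above.
    engine = PythonScriptEngine(environment=TaskDataEnvironment({'Decimal': Decimal}))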