Merging main

Dan 2023-02-27 14:17:10 -05:00
commit f65ed14df3
32 changed files with 3387 additions and 2653 deletions

.github/dependabot.yml (new file)

@@ -0,0 +1,34 @@
version: 2
updates:
- package-ecosystem: github-actions
directory: "/"
schedule:
interval: daily
# - package-ecosystem: pip
# directory: "/.github/workflows"
# schedule:
# interval: daily
# - package-ecosystem: pip
# directory: "/spiffworkflow-backend/docs"
# schedule:
# interval: daily
# - package-ecosystem: pip
# directory: "/"
# schedule:
# interval: daily
# - package-ecosystem: pip
# directory: "/spiffworkflow-backend"
# schedule:
# interval: daily
# - package-ecosystem: npm
# directory: "/.github/workflows"
# schedule:
# interval: daily
# - package-ecosystem: npm
# directory: "/spiffworkflow-frontend"
# schedule:
# interval: daily
# - package-ecosystem: npm
# directory: "/bpmn-js-spiffworkflow"
# schedule:
# interval: daily


@@ -0,0 +1,72 @@
name: Dependabot auto-merge
on:
workflow_run:
workflows: ["Backend Tests", "Frontend Tests"]
# "completed" does not mean the Tests workflow succeeded; see the github.event.workflow_run.conclusion check below
types:
- completed
# workflow_call is used to indicate that a workflow can be called by another workflow. When a workflow is triggered with the workflow_call event, the event payload in the called workflow is the same event payload from the calling workflow. For more information, see "Reusing workflows."
# https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request
# maybe hook into this instead of workflow_run:
# on:
# pull_request:
# pull_request_target:
# types: [labeled]
permissions:
contents: write
jobs:
# uncomment this to print the context for debugging in case a job is getting skipped
# printJob:
# name: Print event
# runs-on: ubuntu-latest
# steps:
# - name: Dump GitHub context
# env:
# GITHUB_CONTEXT: ${{ toJson(github) }}
# run: |
# echo "$GITHUB_CONTEXT"
dependabot:
runs-on: ubuntu-latest
if: ${{ github.actor == 'dependabot[bot]' && github.event.workflow_run.event == 'pull_request' && github.event.workflow_run.conclusion == 'success' }}
steps:
- name: Development Code
uses: actions/checkout@v3
###### GET PR NUMBER
# we saved the pr_number in tests.yml. fetch it so we can merge the correct PR.
# https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_run
- name: "Download artifact"
uses: actions/github-script@v6
with:
script: |
let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({
owner: context.repo.owner,
repo: context.repo.repo,
run_id: context.payload.workflow_run.id,
});
let matchArtifact = allArtifacts.data.artifacts.filter((artifact) => {
return artifact.name == "pr_number"
})[0];
let download = await github.rest.actions.downloadArtifact({
owner: context.repo.owner,
repo: context.repo.repo,
artifact_id: matchArtifact.id,
archive_format: 'zip',
});
let fs = require('fs');
fs.writeFileSync(`${process.env.GITHUB_WORKSPACE}/pr_number.zip`, Buffer.from(download.data));
- name: "Unzip artifact"
run: unzip pr_number.zip
###########
- name: print pr number
run: cat pr_number
- name: actually merge it
run: gh pr merge --auto --merge "$(cat pr_number)"
env:
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
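
For reference, a minimal sketch of the other half of this handoff: the step in tests.yml that saves the PR number as the "pr_number" artifact downloaded above. The step names and placement here are assumptions; only the artifact name has to match what the github-script step looks for.

      - name: Save PR number
        if: ${{ github.event_name == 'pull_request' }}
        run: echo "${{ github.event.number }}" > pr_number
      - name: Upload PR number
        if: ${{ github.event_name == 'pull_request' }}
        uses: actions/upload-artifact@v3
        with:
          name: pr_number
          path: pr_number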


@@ -87,7 +87,7 @@ jobs:
steps:
- name: Check out the repository
uses: actions/checkout@v3.0.2
uses: actions/checkout@v3.3.0
- name: Set up Python ${{ matrix.python }}
uses: actions/setup-python@v4.2.0
@@ -119,29 +119,6 @@ jobs:
pipx inject --pip-args=--constraint=.github/workflows/constraints.txt nox nox-poetry
nox --version
# - name: Compute pre-commit cache key
# if: matrix.session == 'pre-commit'
# id: pre-commit-cache
# shell: python
# run: |
# import hashlib
# import sys
#
# python = "py{}.{}".format(*sys.version_info[:2])
# payload = sys.version.encode() + sys.executable.encode()
# digest = hashlib.sha256(payload).hexdigest()
# result = "${{ runner.os }}-{}-{}-pre-commit".format(python, digest[:8])
#
# print("::set-output name=result::{}".format(result))
#
# - name: Restore pre-commit cache
# uses: actions/cache@v3.0.11
# if: matrix.session == 'pre-commit'
# with:
# path: ~/.cache/pre-commit
# key: ${{ steps.pre-commit-cache.outputs.result }}-${{ hashFiles('.pre-commit-config.yaml') }}
# restore-keys: |
# ${{ steps.pre-commit-cache.outputs.result }}-
- name: Setup Mysql
uses: mirromutth/mysql-action@v1.1
with:
@@ -190,7 +167,7 @@ jobs:
working-directory: .
steps:
- name: Check out the repository
uses: actions/checkout@v3.0.2
uses: actions/checkout@v3.3.0
with:
# Disabling shallow clone is recommended for improving relevancy of reporting in sonarcloud
fetch-depth: 0
@@ -211,7 +188,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out the repository
uses: actions/checkout@v3.0.2
uses: actions/checkout@v3.3.0
with:
# Disabling shallow clone is recommended for improving relevancy of reporting in sonarcloud
fetch-depth: 0
@@ -233,7 +210,7 @@ jobs:
needs: [tests, run_pre_commit_checks, check_docker_start_script]
steps:
- name: Check out the repository
uses: actions/checkout@v3.0.2
uses: actions/checkout@v3.3.0
with:
# Disabling shallow clone is recommended for improving relevancy of reporting in sonarcloud
fetch-depth: 0
@@ -319,7 +296,7 @@ jobs:
steps:
- name: Check out the repository
uses: actions/checkout@v3.0.2
uses: actions/checkout@v3.3.0
with:
# Disabling shallow clone is recommended for improving relevancy of reporting in sonarcloud
fetch-depth: 0


@@ -0,0 +1,116 @@
name: Docker Image For Main Builds
# we want to be able to sort by tag name to find the newest and trace back to source control
# on every commit to main:
# frontend:main-20230223164322-b8becd1-45
# frontend:main-latest
# we settled on:
# main-2023-02-24_16-16-40
# because the labels on the docker image itself have the git sha and everything else :)
# on every tag:
# frontend:latest
#
# Example docker image labels:
# "Labels": {
# "description": "Software development platform for building, running, and monitoring executable diagrams",
# "org.opencontainers.image.created": "2023-02-24T16:43:00.844Z",
# "org.opencontainers.image.description": "",
# "org.opencontainers.image.licenses": "LGPL-2.1",
# "org.opencontainers.image.revision": "54064a050fbf9f366648f0f2e2c60ce244fcc421",
# "org.opencontainers.image.source": "https://github.com/sartography/spiff-arena",
# "org.opencontainers.image.title": "spiff-arena",
# "org.opencontainers.image.url": "https://github.com/sartography/spiff-arena",
# "org.opencontainers.image.version": "main-latest",
# "source": "https://github.com/sartography/spiff-arena"
# }
#
# Git tags for an image:
# curl -H "Authorization: Bearer $(echo -n $TOKEN | base64 -w0)" https://ghcr.io/v2/sartography/spiffworkflow-backend/tags/list | jq -r '.tags | sort_by(.)'
on:
push:
branches:
- main
jobs:
create_frontend_docker_image:
runs-on: ubuntu-latest
env:
REGISTRY: ghcr.io
IMAGE_NAME: sartography/spiffworkflow-frontend
permissions:
contents: read
packages: write
steps:
- name: Check out the repository
uses: actions/checkout@v3.3.0
- name: Log in to the Container registry
uses: docker/login-action@v2.1.0
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Get current date
id: date
run: echo "date=$(date -u +'%Y-%m-%d_%H-%M-%S')" >> $GITHUB_OUTPUT
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v4.3.0
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
tags: |
type=ref,event=branch,suffix=-latest
type=ref,event=branch,suffix=-${{ steps.date.outputs.date }}
- name: Build and push Frontend Docker image
uses: docker/build-push-action@v4.0.0
with:
# this action doesn't seem to respect working-directory so set context
context: spiffworkflow-frontend
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
- name: Adding markdown
run: echo 'TAGS ${{ steps.meta.outputs.tags }}' >> $GITHUB_STEP_SUMMARY
create_backend_docker_image:
runs-on: ubuntu-latest
env:
REGISTRY: ghcr.io
IMAGE_NAME: sartography/spiffworkflow-backend
permissions:
contents: read
packages: write
steps:
- name: Check out the repository
uses: actions/checkout@v3.3.0
- name: Log in to the Container registry
uses: docker/login-action@v2.1.0
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Get current date
id: date
run: echo "date=$(date -u +'%Y-%m-%d_%H-%M-%S')" >> $GITHUB_OUTPUT
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v4.3.0
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
tags: |
type=ref,event=branch,suffix=-latest
type=ref,event=branch,suffix=-${{ steps.date.outputs.date }}
- name: Build and push Backend Docker image
uses: docker/build-push-action@v4.0.0
with:
# this action doesn't seem to respect working-directory so set context
context: spiffworkflow-backend
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
- name: Adding markdown
run: echo 'TAGS ${{ steps.meta.outputs.tags }}' >> $GITHUB_STEP_SUMMARY
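
Given the tag scheme above, a sketch of finding the newest main build for an image, using the curl/jq pattern from the header comment (assumes jq is installed; the date suffix sorts lexicographically in chronological order, and the regex filters out main-latest):

curl -s -H "Authorization: Bearer $(echo -n $TOKEN | base64 -w0)" \
  https://ghcr.io/v2/sartography/spiffworkflow-backend/tags/list \
  | jq -r '.tags | map(select(test("^main-[0-9]"))) | sort | last'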


@@ -5,7 +5,7 @@ on:
tags: [ v* ]
jobs:
create_frontend_docker_container:
create_frontend_docker_image:
runs-on: ubuntu-latest
env:
REGISTRY: ghcr.io
@@ -15,12 +15,9 @@ jobs:
packages: write
steps:
- name: Check out the repository
uses: actions/checkout@v3.0.2
with:
# Disabling shallow clone is recommended for improving relevancy of reporting in sonarcloud
fetch-depth: 0
uses: actions/checkout@v3.3.0
- name: Log in to the Container registry
uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
uses: docker/login-action@v2.1.0
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
@@ -28,13 +25,12 @@ jobs:
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38
uses: docker/metadata-action@v4.3.0
with:
context: spiffworkflow-frontend
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
- name: Build and push Frontend Docker image
uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc
uses: docker/build-push-action@v4.0.0
with:
# this action doesn't seem to respect working-directory so set context
context: spiffworkflow-frontend
@@ -42,7 +38,7 @@ jobs:
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
create_backend_docker_container:
create_backend_docker_image:
runs-on: ubuntu-latest
env:
REGISTRY: ghcr.io
@@ -52,12 +48,9 @@ jobs:
packages: write
steps:
- name: Check out the repository
uses: actions/checkout@v3.0.2
with:
# Disabling shallow clone is recommended for improving relevancy of reporting in sonarcloud
fetch-depth: 0
uses: actions/checkout@v3.3.0
- name: Log in to the Container registry
uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
uses: docker/login-action@v2.1.0
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
@@ -65,12 +58,12 @@ jobs:
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38
uses: docker/metadata-action@v4.3.0
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
- name: Build and push Backend Docker image
uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc
uses: docker/build-push-action@v4.0.0
with:
# this action doesn't seem to respect working-directory so set context
context: spiffworkflow-backend
@@ -89,12 +82,9 @@ jobs:
packages: write
steps:
- name: Check out the repository
uses: actions/checkout@v3.0.2
with:
# Disabling shallow clone is recommended for improving relevancy of reporting in sonarcloud
fetch-depth: 0
uses: actions/checkout@v3.3.0
- name: Log in to the Container registry
uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
uses: docker/login-action@v2.1.0
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
@@ -102,13 +92,12 @@ jobs:
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38
uses: docker/metadata-action@v4.3.0
with:
context: connector-proxy-demo
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
- name: Build and push the connector proxy
uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc
uses: docker/build-push-action@v4.0.0
with:
# this action doesn't seem to respect working-directory so set context
context: connector-proxy-demo
@@ -118,7 +107,7 @@
quickstart-guide-test:
runs-on: ubuntu-latest
needs: [create_frontend_docker_container, create_backend_docker_container, create_demo-proxy]
needs: [create_frontend_docker_image, create_backend_docker_image, create_demo-proxy]
steps:
- name: Checkout
uses: actions/checkout@v3


@@ -16,26 +16,29 @@
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
import re
import xml.dom.minidom as minidom
from .. import operators
from ..specs.Simple import Simple
from ..specs.WorkflowSpec import WorkflowSpec
from ..exceptions import StorageException
from ..exceptions import SpiffWorkflowException
from .base import Serializer, spec_map, op_map
# Create a list of tag names out of the spec names.
_spec_map = spec_map()
_op_map = op_map()
_exc = StorageException
class XMLParserException(SpiffWorkflowException):
pass
class XmlSerializer(Serializer):
"""Parses XML into a WorkflowSpec object."""
"""
Parses XML into a WorkflowSpec object.
"""
# Note: This is not a serializer. It is a parser for Spiff's XML format
# However, it is too disruptive to rename everything that uses it.
def raise_parser_exception(self, message):
raise XMLParserException(message)
def deserialize_assign(self, workflow, start_node):
"""
@@ -43,17 +46,18 @@ class XmlSerializer(Serializer):
start_node -- the xml node (xml.dom.minidom.Node)
"""
name = start_node.getAttribute('name')
attrib = start_node.getAttribute('field')
value = start_node.getAttribute('value')
name = start_node.attrib.get('name')
attrib = start_node.attrib.get('field')
value = start_node.attrib.get('value')
kwargs = {}
if not name:
_exc('name attribute required')
if attrib != '' and value != '':
_exc('Both, field and right-value attributes found')
elif attrib == '' and value == '':
_exc('field or value attribute required')
elif value != '':
self.raise_parser_exception('name attribute required')
if attrib is not None and value is not None:
self.raise_parser_exception('Both, field and right-value attributes found')
elif attrib is None and value is None:
self.raise_parser_exception('field or value attribute required')
elif value is not None:
kwargs['right'] = value
else:
kwargs['right_attribute'] = attrib
@@ -65,8 +69,8 @@ class XmlSerializer(Serializer):
start_node -- the xml node (xml.dom.minidom.Node)
"""
name = start_node.getAttribute('name')
value = start_node.getAttribute('value')
name = start_node.attrib.get('name')
value = start_node.attrib.get('value')
return name, value
def deserialize_assign_list(self, workflow, start_node):
@@ -78,13 +82,13 @@
"""
# Collect all information.
assignments = []
for node in start_node.childNodes:
if node.nodeType != minidom.Node.ELEMENT_NODE:
continue
if node.nodeName.lower() == 'assign':
for node in start_node.getchildren():
if not isinstance(node.tag, str):
pass
elif node.tag.lower() == 'assign':
assignments.append(self.deserialize_assign(workflow, node))
else:
_exc('Unknown node: %s' % node.nodeName)
self.raise_parser_exception('Unknown node: %s' % node.tag)
return assignments
def deserialize_logical(self, node):
@@ -93,26 +97,26 @@ class XmlSerializer(Serializer):
node -- the xml node (xml.dom.minidom.Node)
"""
term1_attrib = node.getAttribute('left-field')
term1_value = node.getAttribute('left-value')
op = node.nodeName.lower()
term2_attrib = node.getAttribute('right-field')
term2_value = node.getAttribute('right-value')
term1_attrib = node.attrib.get('left-field')
term1_value = node.attrib.get('left-value')
op = node.tag.lower()
term2_attrib = node.attrib.get('right-field')
term2_value = node.attrib.get('right-value')
if op not in _op_map:
_exc('Invalid operator')
if term1_attrib != '' and term1_value != '':
_exc('Both, left-field and left-value attributes found')
elif term1_attrib == '' and term1_value == '':
_exc('left-field or left-value attribute required')
elif term1_value != '':
self.raise_parser_exception('Invalid operator')
if term1_attrib is not None and term1_value is not None:
self.raise_parser_exception('Both, left-field and left-value attributes found')
elif term1_attrib is None and term1_value is None:
self.raise_parser_exception('left-field or left-value attribute required')
elif term1_value is not None:
left = term1_value
else:
left = operators.Attrib(term1_attrib)
if term2_attrib != '' and term2_value != '':
_exc('Both, right-field and right-value attributes found')
elif term2_attrib == '' and term2_value == '':
_exc('right-field or right-value attribute required')
elif term2_value != '':
if term2_attrib is not None and term2_value is not None:
self.raise_parser_exception('Both, right-field and right-value attributes found')
elif term2_attrib is None and term2_value is None:
self.raise_parser_exception('right-field or right-value attribute required')
elif term2_value is not None:
right = term2_value
else:
right = operators.Attrib(term2_attrib)
@@ -128,26 +132,26 @@ class XmlSerializer(Serializer):
# Collect all information.
condition = None
spec_name = None
for node in start_node.childNodes:
if node.nodeType != minidom.Node.ELEMENT_NODE:
continue
if node.nodeName.lower() == 'successor':
for node in start_node.getchildren():
if not isinstance(node.tag, str):
pass
elif node.tag.lower() == 'successor':
if spec_name is not None:
_exc('Duplicate task name %s' % spec_name)
if node.firstChild is None:
_exc('Successor tag without a task name')
spec_name = node.firstChild.nodeValue
elif node.nodeName.lower() in _op_map:
self.raise_parser_exception('Duplicate task name %s' % spec_name)
if node.text is None:
self.raise_parser_exception('Successor tag without a task name')
spec_name = node.text
elif node.tag.lower() in _op_map:
if condition is not None:
_exc('Multiple conditions are not yet supported')
self.raise_parser_exception('Multiple conditions are not yet supported')
condition = self.deserialize_logical(node)
else:
_exc('Unknown node: %s' % node.nodeName)
self.raise_parser_exception('Unknown node: %s' % node.tag)
if condition is None:
_exc('Missing condition in conditional statement')
self.raise_parser_exception('Missing condition in conditional statement')
if spec_name is None:
_exc('A %s has no task specified' % start_node.nodeName)
self.raise_parser_exception('A %s has no task specified' % start_node.tag)
return condition, spec_name
def deserialize_task_spec(self, workflow, start_node, read_specs):
@@ -160,31 +164,31 @@ class XmlSerializer(Serializer):
start_node -- the xml structure (xml.dom.minidom.Node)
"""
# Extract attributes from the node.
nodetype = start_node.nodeName.lower()
name = start_node.getAttribute('name').lower()
context = start_node.getAttribute('context').lower()
mutex = start_node.getAttribute('mutex').lower()
cancel = start_node.getAttribute('cancel').lower()
success = start_node.getAttribute('success').lower()
times = start_node.getAttribute('times').lower()
times_field = start_node.getAttribute('times-field').lower()
threshold = start_node.getAttribute('threshold').lower()
threshold_field = start_node.getAttribute('threshold-field').lower()
file_name = start_node.getAttribute('file').lower()
file_field = start_node.getAttribute('file-field').lower()
nodetype = start_node.tag.lower()
name = start_node.attrib.get('name', '').lower()
context = start_node.attrib.get('context', '').lower()
mutex = start_node.attrib.get('mutex', '').lower()
cancel = start_node.attrib.get('cancel', '').lower()
success = start_node.attrib.get('success', '').lower()
times = start_node.attrib.get('times', '').lower()
times_field = start_node.attrib.get('times-field', '').lower()
threshold = start_node.attrib.get('threshold', '').lower()
threshold_field = start_node.attrib.get('threshold-field', '').lower()
file_name = start_node.attrib.get('file', '').lower()
file_field = start_node.attrib.get('file-field', '').lower()
kwargs = {'lock': [],
'data': {},
'defines': {},
'pre_assign': [],
'post_assign': []}
if nodetype not in _spec_map:
_exc('Invalid task type "%s"' % nodetype)
self.raise_parser_exception('Invalid task type "%s"' % nodetype)
if nodetype == 'start-task':
name = 'start'
if name == '':
_exc('Invalid task name "%s"' % name)
self.raise_parser_exception('Invalid task name "%s"' % name)
if name in read_specs:
_exc('Duplicate task name "%s"' % name)
self.raise_parser_exception('Duplicate task name "%s"' % name)
if cancel != '' and cancel != '0':
kwargs['cancel'] = True
if success != '' and success != '0':
@@ -210,55 +214,55 @@ class XmlSerializer(Serializer):
# Walk through the children of the node.
successors = []
for node in start_node.childNodes:
if node.nodeType != minidom.Node.ELEMENT_NODE:
continue
if node.nodeName == 'description':
kwargs['description'] = node.firstChild.nodeValue
elif node.nodeName == 'successor' \
or node.nodeName == 'default-successor':
if node.firstChild is None:
_exc('Empty %s tag' % node.nodeName)
successors.append((None, node.firstChild.nodeValue))
elif node.nodeName == 'conditional-successor':
for node in start_node.getchildren():
if not isinstance(node.tag, str):
pass
elif node.tag == 'description':
kwargs['description'] = node.text
elif node.tag == 'successor' \
or node.tag == 'default-successor':
if not node.text:
self.raise_parser_exception('Empty %s tag' % node.tag)
successors.append((None, node.text))
elif node.tag == 'conditional-successor':
successors.append(self.deserialize_condition(workflow, node))
elif node.nodeName == 'define':
elif node.tag == 'define':
key, value = self.deserialize_data(workflow, node)
kwargs['defines'][key] = value
# "property" tag exists for backward compatibility.
elif node.nodeName == 'data' or node.nodeName == 'property':
elif node.tag == 'data' or node.tag == 'property':
key, value = self.deserialize_data(workflow, node)
kwargs['data'][key] = value
elif node.nodeName == 'pre-assign':
elif node.tag == 'pre-assign':
kwargs['pre_assign'].append(
self.deserialize_assign(workflow, node))
elif node.nodeName == 'post-assign':
elif node.tag == 'post-assign':
kwargs['post_assign'].append(
self.deserialize_assign(workflow, node))
elif node.nodeName == 'in':
elif node.tag == 'in':
kwargs['in_assign'] = self.deserialize_assign_list(
workflow, node)
elif node.nodeName == 'out':
elif node.tag == 'out':
kwargs['out_assign'] = self.deserialize_assign_list(
workflow, node)
elif node.nodeName == 'cancel':
if node.firstChild is None:
_exc('Empty %s tag' % node.nodeName)
elif node.tag == 'cancel':
if not node.text:
self.raise_parser_exception('Empty %s tag' % node.tag)
if context == '':
context = []
elif not isinstance(context, list):
context = [context]
context.append(node.firstChild.nodeValue)
elif node.nodeName == 'lock':
if node.firstChild is None:
_exc('Empty %s tag' % node.nodeName)
kwargs['lock'].append(node.firstChild.nodeValue)
elif node.nodeName == 'pick':
if node.firstChild is None:
_exc('Empty %s tag' % node.nodeName)
kwargs['choice'].append(node.firstChild.nodeValue)
context.append(node.text)
elif node.tag == 'lock':
if not node.text:
self.raise_parser_exception('Empty %s tag' % node.tag)
kwargs['lock'].append(node.text)
elif node.tag == 'pick':
if not node.text:
self.raise_parser_exception('Empty %s tag' % node.tag)
kwargs['choice'].append(node.text)
else:
_exc('Unknown node: %s' % node.nodeName)
self.raise_parser_exception('Unknown node: %s' % node.tag)
# Create a new instance of the task spec.
module = _spec_map[nodetype]
@@ -266,9 +270,9 @@ class XmlSerializer(Serializer):
spec = module(workflow, **kwargs)
elif nodetype == 'multi-instance' or nodetype == 'thread-split':
if times == '' and times_field == '':
_exc('Missing "times" or "times-field" in "%s"' % name)
self.raise_parser_exception('Missing "times" or "times-field" in "%s"' % name)
elif times != '' and times_field != '':
_exc('Both, "times" and "times-field" in "%s"' % name)
self.raise_parser_exception('Both, "times" and "times-field" in "%s"' % name)
spec = module(workflow, name, **kwargs)
elif context == '':
spec = module(workflow, name, **kwargs)
@@ -277,34 +281,31 @@ class XmlSerializer(Serializer):
read_specs[name] = spec, successors
def deserialize_workflow_spec(self, s_state, filename=None):
def deserialize_workflow_spec(self, root_node, filename=None):
"""
Reads the workflow from the given XML structure and returns a
WorkflowSpec instance.
"""
dom = minidom.parseString(s_state)
node = dom.getElementsByTagName('process-definition')[0]
name = node.getAttribute('name')
name = root_node.attrib.get('name')
if not name:
_exc('%s without a name attribute' % node.nodeName)
self.raise_parser_exception('%s without a name attribute' % root_node.tag)
# Read all task specs and create a list of successors.
workflow_spec = WorkflowSpec(name, filename)
del workflow_spec.task_specs['Start']
end = Simple(workflow_spec, 'End'), []
read_specs = dict(end=end)
for child_node in node.childNodes:
if child_node.nodeType != minidom.Node.ELEMENT_NODE:
continue
if child_node.nodeName == 'name':
workflow_spec.name = child_node.firstChild.nodeValue
elif child_node.nodeName == 'description':
workflow_spec.description = child_node.firstChild.nodeValue
elif child_node.nodeName.lower() in _spec_map:
self.deserialize_task_spec(
workflow_spec, child_node, read_specs)
for child_node in root_node.getchildren():
if not isinstance(child_node.tag, str):
pass
elif child_node.tag == 'name':
workflow_spec.name = child_node.text
elif child_node.tag == 'description':
workflow_spec.description = child_node.text
elif child_node.tag.lower() in _spec_map:
self.deserialize_task_spec(workflow_spec, child_node, read_specs)
else:
_exc('Unknown node: %s' % child_node.nodeName)
self.raise_parser_exception('Unknown node: %s' % child_node.tag)
# Remove the default start-task from the workflow.
workflow_spec.start = read_specs['start'][0]
@@ -314,7 +315,7 @@ class XmlSerializer(Serializer):
spec, successors = read_specs[name]
for condition, successor_name in successors:
if successor_name not in read_specs:
_exc('Unknown successor: "%s"' % successor_name)
self.raise_parser_exception('Unknown successor: "%s"' % successor_name)
successor, foo = read_specs[successor_name]
if condition is None:
spec.connect(successor)

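The core of this change is the migration from xml.dom.minidom to lxml.etree. A minimal standalone sketch (not part of this commit) of the API differences the rewrite tracks: missing attributes come back as None instead of '', element text is read from .text instead of firstChild.nodeValue, and comments are skipped by checking the tag type rather than nodeType:

import xml.dom.minidom as minidom
from lxml import etree

XML = '<process-definition name="demo"><description>hi</description></process-definition>'

# Old style: DOM nodes; '' for a missing attribute, text via firstChild.
dom_root = minidom.parseString(XML).documentElement
assert dom_root.getAttribute('missing') == ''
assert dom_root.getElementsByTagName('description')[0].firstChild.nodeValue == 'hi'

# New style: etree elements; None for a missing attribute, text via .text.
etree_root = etree.fromstring(XML)
assert etree_root.attrib.get('missing') is None
assert etree_root.find('description').text == 'hi'

# Comments and processing instructions have non-string tags in lxml,
# which is why the new code checks isinstance(node.tag, str).
for child in etree_root:
    if isinstance(child.tag, str):
        print(child.tag)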

@@ -18,6 +18,8 @@
# 02110-1301 USA
import os
from lxml import etree
from .StartTask import StartTask
from .base import TaskSpec
from ..task import TaskState
@@ -93,9 +95,8 @@ class SubWorkflow(TaskSpec):
file_name = valueof(my_task, self.file)
serializer = XmlSerializer()
with open(file_name) as fp:
xml = fp.read()
wf_spec = WorkflowSpec.deserialize(
serializer, xml, filename=file_name)
xml = etree.parse(fp).getroot()
wf_spec = WorkflowSpec.deserialize(serializer, xml, filename=file_name)
outer_workflow = my_task.workflow.outer_workflow
return Workflow(wf_spec, parent=outer_workflow)


@@ -6,6 +6,8 @@ import unittest
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
from lxml import etree
from SpiffWorkflow.specs.WorkflowSpec import WorkflowSpec
from SpiffWorkflow.task import Task
from SpiffWorkflow.serializer.prettyxml import XmlSerializer
@@ -64,7 +66,7 @@ class PatternTest(unittest.TestCase):
# Test patterns that are defined in XML format.
if filename.endswith('.xml'):
with open(filename) as fp:
xml = fp.read()
xml = etree.parse(fp).getroot()
serializer = XmlSerializer()
wf_spec = WorkflowSpec.deserialize(
serializer, xml, filename=filename)


@@ -6,6 +6,8 @@ import os
data_dir = os.path.join(os.path.dirname(__file__), 'data')
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
from lxml import etree
from SpiffWorkflow.workflow import Workflow
from SpiffWorkflow.specs.Cancel import Cancel
from SpiffWorkflow.specs.Simple import Simple
@@ -27,7 +29,7 @@ class WorkflowTest(unittest.TestCase):
"""
xml_file = os.path.join(data_dir, 'spiff', 'workflow1.xml')
with open(xml_file) as fp:
xml = fp.read()
xml = etree.parse(fp).getroot()
wf_spec = WorkflowSpec.deserialize(XmlSerializer(), xml)
workflow = Workflow(wf_spec)


@@ -4,6 +4,8 @@ import sys
import unittest
import os
from lxml import etree
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))
from SpiffWorkflow.specs.WorkflowSpec import WorkflowSpec
@@ -30,7 +32,7 @@ class TaskSpecTest(unittest.TestCase):
os.path.dirname(__file__), '..', 'data', 'spiff', folder, f)
serializer = XmlSerializer()
with open(file) as fp:
xml = fp.read()
xml = etree.parse(fp).getroot()
self.wf_spec = WorkflowSpec.deserialize(
serializer, xml, filename=file)
self.workflow = Workflow(self.wf_spec)


@@ -8,6 +8,8 @@ import unittest
data_dir = os.path.join(os.path.dirname(__file__), '..', 'data')
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))
from lxml import etree
import pickle
from random import randint
try:
@@ -82,7 +84,7 @@ class WorkflowSpecTest(unittest.TestCase):
# Read a complete workflow spec.
xml_file = os.path.join(data_dir, 'spiff', 'workflow1.xml')
with open(xml_file) as fp:
xml = fp.read()
xml = etree.parse(fp).getroot()
path_file = os.path.splitext(xml_file)[0] + '.path'
with open(path_file) as fp:
expected_path = fp.read().strip().split('\n')


@@ -41,13 +41,13 @@ if [[ "${1:-}" == "clean" ]]; then
# TODO: check to see if the db already exists and we can connect to it. also actually clean it up.
# start postgres in background with one db
if [[ "${SPIFFWORKFLOW_BACKEND_DATABASE_TYPE:-}" == "postgres" ]]; then
if ! docker exec -it postgres-spiff psql -U spiffworkflow_backend spiffworkflow_backend_testing -c "select 1"; then
docker run --name postgres-spiff -p 5432:5432 -e POSTGRES_PASSWORD=spiffworkflow_backend -e POSTGRES_USER=spiffworkflow_backend -e POSTGRES_DB=spiffworkflow_backend_testing -d postgres
if ! docker exec -it postgres-spiff psql -U spiffworkflow_backend spiffworkflow_backend_unit_testing -c "select 1"; then
docker run --name postgres-spiff -p 5432:5432 -e POSTGRES_PASSWORD=spiffworkflow_backend -e POSTGRES_USER=spiffworkflow_backend -e POSTGRES_DB=spiffworkflow_backend_unit_testing -d postgres
sleep 4 # classy
fi
if ! docker exec -it postgres-spiff psql -U spiffworkflow_backend spiffworkflow_backend_local_development -c "select 1"; then
# create other db. spiffworkflow_backend_testing came with the docker run.
docker exec -it postgres-spiff psql -U spiffworkflow_backend spiffworkflow_backend_testing -c "create database spiffworkflow_backend_local_development;"
# create other db. spiffworkflow_backend_unit_testing came with the docker run.
docker exec -it postgres-spiff psql -U spiffworkflow_backend spiffworkflow_backend_unit_testing -c "create database spiffworkflow_backend_local_development;"
fi
fi
elif [[ "${1:-}" == "migrate" ]]; then


@@ -23,10 +23,14 @@ fi
if [[ -z "${KEYCLOAK_BASE_URL:-}" ]]; then
KEYCLOAK_BASE_URL=http://localhost:7002
fi
if [[ -z "${ADMIN_USERNAME:-}" ]]; then
ADMIN_USERNAME="admin"
fi
if [[ -z "${ADMIN_PASSWORD:-}" ]]; then
ADMIN_PASSWORD="admin"
fi
REALM_NAME="$keycloak_realm"
ADMIN_USERNAME="admin"
ADMIN_PASSWORD="admin"
SECURE=false
KEYCLOAK_URL=$KEYCLOAK_BASE_URL/realms/$REALM_NAME/protocol/openid-connect/token
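
A hedged example of using the URL this script assembles: request a token with the resource-owner password grant. The client_id here (admin-cli) is an assumption; any client in the realm with direct access grants enabled would work.

curl -s -X POST "$KEYCLOAK_URL" \
  --data-urlencode "grant_type=password" \
  --data-urlencode "client_id=admin-cli" \
  --data-urlencode "username=$ADMIN_USERNAME" \
  --data-urlencode "password=$ADMIN_PASSWORD" \
  | jq -r .access_token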


@@ -484,21 +484,21 @@
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "27b5bdce-1c02-4249-b8ba-521f9bcae2d3",
"createdTimestamp" : 1676302139921,
"username" : "app.program.lead",
"id" : "d959fd73-92b5-43f4-a210-9457c0b89296",
"createdTimestamp" : 1677187934315,
"username" : "app.program-lead",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "app.program.lead@status.im",
"email" : "app.program-lead@status.im",
"attributes" : {
"spiffworkflow-employeeid" : [ "121" ]
},
"credentials" : [ {
"id" : "8cd62c66-7357-4c8f-ae57-e45a10150f2d",
"id" : "d959fd73-92b5-43f4-a210-9457c0b89296",
"type" : "password",
"createdDate" : 1676302139956,
"secretData" : "{\"value\":\"NhRRaTaL4o8TLmLgFrfIlLo1lBGRgAcoQ+ct7ypw/osYNXcF1zIC7i0AYrwrSSWQ60Wxcx6RZTFRQsZobwCbUw==\",\"salt\":\"nOhBgYVO/Me08wmfOatRdQ==\",\"additionalParameters\":{}}",
"createdDate" : 1677187934366,
"secretData" : "{\"value\":\"6njfc7gdZ1NTsmiyMXOztog8H7yKDSYgBsCFjTod0IszE0zq3WrekGKuT3GDHTHE5xVLO0SZbDQ4V5uRm0auPQ==\",\"salt\":\"eNwudU7v/gvIFX/WNtPu9w==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
@@ -531,6 +531,167 @@
},
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "7721b278-b117-45c6-9e98-d66efa6272a4",
"createdTimestamp" : 1677187934488,
"username" : "codex.project-lead",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "codex.project-lead@status.im",
"attributes" : {
"spiffworkflow-employeeid" : [ "153" ]
},
"credentials" : [ {
"id" : "4ed0c40f-bd6f-41a2-87c0-f35e826d196c",
"type" : "password",
"createdDate" : 1677187934523,
"secretData" : "{\"value\":\"0xkk4BBlMNVl/xL2b4KLf25PP9h8uY1d2n9kTwEJVm0oOhqnaSEpyKTGlS+oV33DhpNnBDqME922xP+j8kYNgQ==\",\"salt\":\"g20ITxwFU1PnkD4LGdEeIA==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "5e2a535e-056e-485c-b0af-c49bf0d64106",
"createdTimestamp" : 1677181799609,
"username" : "codex.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "codex.sme@status.im",
"attributes" : {
"spiffworkflow-employeeid" : [ "185" ]
},
"credentials" : [ {
"id" : "a2cf9daf-25d2-4cd2-b932-4706442a8437",
"type" : "password",
"createdDate" : 1677181799644,
"secretData" : "{\"value\":\"UY+PfYh5h48i40Klq0KEPVc0DBUrGRxI70BFcs98MD8R7ORJ5G6rWKA3Dq/5I8btu3CJI4PbFeTS/IopMhB7vQ==\",\"salt\":\"mtx4JqI61nsCni3s26PMJg==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "84e8eebf-59ca-466d-8523-2da0aef088ed",
"createdTimestamp" : 1677181799762,
"username" : "codex1.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "codex1.sme@status.im",
"attributes" : {
"spiffworkflow-employeeid" : [ "186" ]
},
"credentials" : [ {
"id" : "cace5730-6cd3-4d19-b0e4-10078fc5024a",
"type" : "password",
"createdDate" : 1677181799797,
"secretData" : "{\"value\":\"QwHtrufirwh38UBlalAikD+dqDo3Bnsp5350OBClcmv7QSlPQ/MqVppRfZXLaseIBbzvnuAjCxmrwtE8ERoy2g==\",\"salt\":\"0LkJgwINFOuVQGvHFp7GVA==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "ffaa3c6f-d6bc-4920-81b8-39d842f57ac5",
"createdTimestamp" : 1677181799898,
"username" : "codex2.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "codex2.sme@status.im",
"attributes" : {
"spiffworkflow-employeeid" : [ "187" ]
},
"credentials" : [ {
"id" : "8c8b872b-86cf-40c8-84a3-f432e0bebee4",
"type" : "password",
"createdDate" : 1677181799933,
"secretData" : "{\"value\":\"IGE1BnNopOP7OJIi5e8AUxT6ZUolat3TkheXZ030xqabu81VdAFYjRKKsrhSf39t9T9ze3d3wHZ0+xI76yxh5Q==\",\"salt\":\"KD8gdrC8seSWEPUJJHKLDw==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "7393f1d8-e58c-4b80-8664-6f80931deb7b",
"createdTimestamp" : 1677181800044,
"username" : "codex3.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "codex3.sme@status.im",
"attributes" : {
"spiffworkflow-employeeid" : [ "188" ]
},
"credentials" : [ {
"id" : "ba8252cc-5900-4f5a-8c7e-590b2028ebd0",
"type" : "password",
"createdDate" : 1677181800080,
"secretData" : "{\"value\":\"HrlyO6uWQp615hB9eLdfl5W7ooTw8fZU+jwyFyUsUdIP+HJ2Es4Cu46bJ9Hgdnd7pmuGUma0C/xXR7EGNdvH9w==\",\"salt\":\"XVbQSX3HYRMIqCTyPJmQZw==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "a862432c-cf03-4282-b0af-7dff20bfaca6",
"createdTimestamp" : 1677181800213,
"username" : "codex4.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "codex4.sme@status.im",
"attributes" : {
"spiffworkflow-employeeid" : [ "189" ]
},
"credentials" : [ {
"id" : "43f9f0dd-bae5-4e5b-9c8d-d067e203a1a3",
"type" : "password",
"createdDate" : 1677181800248,
"secretData" : "{\"value\":\"J56SkiE1uYDbA/3k1bFdQzauQG9AYWrR4gZoBTKT/acbOP+p5r0wpZ9BkotDc/R3X9q1KxYx3xU/8BjjZEebwQ==\",\"salt\":\"djpJqi+BXbc2jq+bnthlKw==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "b4239cc1-cc70-4224-bd1c-e89e7667dc5a",
"createdTimestamp" : 1677181800350,
"username" : "codex5.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "codex5.sme@status.im",
"attributes" : {
"spiffworkflow-employeeid" : [ "190" ]
},
"credentials" : [ {
"id" : "01585949-5171-4bd6-8193-521c60a1c5b0",
"type" : "password",
"createdDate" : 1677181800384,
"secretData" : "{\"value\":\"VMRw0Z1VZn1vpObUDJu/sKqigkAmdClroJCMNh4msPa8gj13+3KLKrP0xvkFz52PI+3zneb21Mj1FDxlwfzBtg==\",\"salt\":\"+HaiDG8H7DC5XapT0PAARQ==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "6151b58a-ca4f-44e6-a82a-f13363234555",
"createdTimestamp" : 1676302140070,
@@ -669,6 +830,29 @@
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "9b7820b2-ad02-431f-a603-2d9b7d4415c8",
"createdTimestamp" : 1677181801624,
"username" : "core6.contributor",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "core6.contributor@status.im",
"attributes" : {
"spiffworkflow-employeeid" : [ "199" ]
},
"credentials" : [ {
"id" : "b6cc5352-e173-44e2-a37d-3607b606ab1b",
"type" : "password",
"createdDate" : 1677181801659,
"secretData" : "{\"value\":\"ZIjW8sUAJ5AczMOy+3Jgq82F0hvXqWmcLsmVY88hgVr4rkdjMu0+oOv36OfLFeFNwJrNxQAAots7RGuAyPbZQg==\",\"salt\":\"y6SgpBIdSuEzeJpeFx7/GQ==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "7b9767ac-24dc-43b0-838f-29e16b4fd14e",
"createdTimestamp" : 1675718483773,
@@ -752,6 +936,29 @@
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "fae611e8-cde1-4fa1-b653-c6bef8a8c26c",
"createdTimestamp" : 1677181800520,
"username" : "desktop.project-lead",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "desktop.project-lead@status.im",
"attributes" : {
"spiffworkflow-employeeid" : [ "192" ]
},
"credentials" : [ {
"id" : "8bc1602b-dceb-4a59-9809-68cb28ff8928",
"type" : "password",
"createdDate" : 1677181800557,
"secretData" : "{\"value\":\"MFB6lcRCnLoXHXMfPDFbDoQSSXmCsZUFetlI+VJVyMieBXesUrBsYC2XrBQX/bg/jI7569Z26ppsh1VtKxrBmw==\",\"salt\":\"f2CuJRGCdmB4QMguj4jMdQ==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "a71e29a7-678c-4a08-9273-5c8490577c98",
"createdTimestamp" : 1676302141251,
@@ -772,6 +979,144 @@
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "78e0a909-3634-43f3-80b0-034aa1ddc01d",
"createdTimestamp" : 1677181800708,
"username" : "desktop.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "desktop.sme@status.im",
"attributes" : {
"spiffworkflow-employeeid" : [ "193" ]
},
"credentials" : [ {
"id" : "cf167058-268f-42da-94bc-01b35a562f5f",
"type" : "password",
"createdDate" : 1677181800744,
"secretData" : "{\"value\":\"IaSxg2RlpOnwutRGE7QPNVJtmA3klsizOGJq/g+dxAtOYweS1gYlWBFX4EB5zzAfB3gsA3P6gq+2avSK+besNw==\",\"salt\":\"AiM8CxndaAemRW8BQ/r4fw==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "38924acc-ac03-4dca-8394-3917121f7509",
"createdTimestamp" : 1677181800877,
"username" : "desktop1.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "desktop1.sme@status.im",
"attributes" : {
"spiffworkflow-employeeid" : [ "194" ]
},
"credentials" : [ {
"id" : "204a79e9-6912-4ba9-a0f9-f001ed343242",
"type" : "password",
"createdDate" : 1677181800914,
"secretData" : "{\"value\":\"id13Cma1swB0HDj61wGA7xEIjWN8YKC1qA1WEP4ccV9frIm75xlyBGzwerQg9acNeu1Cltt2m1PDa8pE5ehw+g==\",\"salt\":\"baZl2HLuriksSDppoo/VjA==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "68e45305-dfcc-4ecc-8e62-8d838c46cf56",
"createdTimestamp" : 1677181801035,
"username" : "desktop2.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "desktop2.sme@status.im",
"attributes" : {
"spiffworkflow-employeeid" : [ "195" ]
},
"credentials" : [ {
"id" : "586b4314-bfc5-44c0-b1ec-bc8250a546e4",
"type" : "password",
"createdDate" : 1677181801070,
"secretData" : "{\"value\":\"B/7DfIn/ZzJMhzJKZnPQ6oFqQJv/jfRunWDu16TDcfCXXSOlJMmdn2R1yYSSL+hGgDYpaOT86woq0en67uFhnA==\",\"salt\":\"znRgPUHANthkIwXrcOnynQ==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "8b569875-5265-47bc-b4f9-74764e64fbb9",
"createdTimestamp" : 1677181801182,
"username" : "desktop3.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "desktop3.sme@status.im",
"attributes" : {
"spiffworkflow-employeeid" : [ "196" ]
},
"credentials" : [ {
"id" : "b7444657-1937-49c4-b48d-15cd69caec47",
"type" : "password",
"createdDate" : 1677181801216,
"secretData" : "{\"value\":\"iqUzNvgmigp4hgRO4j9rKUvdC/Qa2tLjGJdf5Mf2UieQqBZlqTt0EF/FielwV+D4qYDswcf7Lx9Kyc6sDkOX7g==\",\"salt\":\"113PrU+Thd35/KNKcz1bBg==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "0da7c2a9-f41b-4fdf-b54b-d2c425b18994",
"createdTimestamp" : 1677181801321,
"username" : "desktop4.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "desktop4.sme@status.im",
"attributes" : {
"spiffworkflow-employeeid" : [ "197" ]
},
"credentials" : [ {
"id" : "ac8cfe7e-4a46-436d-8a72-8a2a061e803b",
"type" : "password",
"createdDate" : 1677181801357,
"secretData" : "{\"value\":\"AxFY+VsvoLTKflDvg3cRMjXdOZVOHoRAVxlUVR2YktXsadpo2Jl0ixehU/BByIAs/+TKl8ECM/qQdYV7rZ3rHw==\",\"salt\":\"WV5MxscAoBdJEvSs2HzWAg==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "c1796e6c-1396-4d11-85c2-409225d0ccba",
"createdTimestamp" : 1677181801479,
"username" : "desktop5.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "desktop5.sme@status.im",
"attributes" : {
"spiffworkflow-employeeid" : [ "198" ]
},
"credentials" : [ {
"id" : "5ca9a203-1a04-4be6-93fe-b98f566a6660",
"type" : "password",
"createdDate" : 1677181801516,
"secretData" : "{\"value\":\"WDBB8FDGzyzsjq+Dl+9NXDK7+/S+9VbRFcEyKPxuKe48JvI00s2ZKXE065VuiUAVMvg2RV1tbgw8m31o13m0wA==\",\"salt\":\"wSyEjFR+uWxSA9dc0SNuwQ==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "3873c0ba-349c-4bec-8be2-5ced8acd56ec",
"createdTimestamp" : 1675718483992,
@@ -993,6 +1338,52 @@
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "c4bb6e6d-da8b-4c4f-9b83-fdf8516d6946",
"createdTimestamp" : 1677181798082,
"username" : "infra4.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "infra4.sme@status.im",
"attributes" : {
"spiffworkflow-employeeid" : [ "175" ]
},
"credentials" : [ {
"id" : "c7a26698-8d27-4d8f-a8dd-519f74a6d516",
"type" : "password",
"createdDate" : 1677181798173,
"secretData" : "{\"value\":\"k8GfsfeWZg8wfVikCTew3Pgfs/XmlyRl9duh5pe4obM8E+XzGQfgSgx1T4xEIlr/TYl0Hep9zRxEcEtoYNlz8g==\",\"salt\":\"TH94ZAwlFT9cuKgBtcLPzw==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "9f269f95-0a5e-4cad-91d5-7b61ee2c795c",
"createdTimestamp" : 1677181798337,
"username" : "infra5.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "infra5.sme@status.im",
"attributes" : {
"spiffworkflow-employeeid" : [ "176" ]
},
"credentials" : [ {
"id" : "04faad56-de12-4a8f-ad54-0e8ef865b0ef",
"type" : "password",
"createdDate" : 1677181798373,
"secretData" : "{\"value\":\"5VJxVKz0uE0a8tZQMbBVaxcEqfdmJdsAdB6T8t0grY+L4etXZHnLlucKkCtQ9aJy1PcDMLjXu6ETrqoTuLkehA==\",\"salt\":\"a6PypYQwyD2Fv/e2UXzGvg==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "b8d0d90e-9a7e-446c-9984-082cb315af8f",
"createdTimestamp" : 1675718484095,
@@ -1271,6 +1662,75 @@
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "60ba78a0-c346-4967-ad90-89b11d3e5e11",
"createdTimestamp" : 1677181798495,
"username" : "legal4.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "legal4.sme@status.im",
"attributes" : {
"spiffworkflow-employeeid" : [ "177" ]
},
"credentials" : [ {
"id" : "b3efb51c-8dd7-451d-b213-05363588e461",
"type" : "password",
"createdDate" : 1677181798529,
"secretData" : "{\"value\":\"WE9bf/FrGPslQr6NW6Cfq/2U6LLorW8R7PVhIIBqbMC0Ndqqv18wHceyZvLCBUkjiTukPhhUHYYvPCZct0KQjw==\",\"salt\":\"OgtPrHOUoLVNiD8kjVo2fg==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "d481687d-8c76-456d-9e0c-d66075380bbd",
"createdTimestamp" : 1677181798643,
"username" : "legal5.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "legal5.sme@status.im",
"attributes" : {
"spiffworkflow-employeeid" : [ "178" ]
},
"credentials" : [ {
"id" : "26804d36-5691-4ee2-8a03-ac0f69045d6a",
"type" : "password",
"createdDate" : 1677181798677,
"secretData" : "{\"value\":\"yAGa86rD7oVWAUjj2IApbBoIK1CevLxXiJQ3UDdHpJLVVDYRkCDF3qel111EqbsGsdOJ1g2cbc4ii2baM57Jog==\",\"salt\":\"2kzSBHUfFi+EHXJTVlnJ7w==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "8a03f00f-310d-4bae-b918-f6f128f98095",
"createdTimestamp" : 1677187934419,
"username" : "logos.program-lead",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "logos.program-lead@status.im",
"attributes" : {
"spiffworkflow-employeeid" : [ "160" ]
},
"credentials" : [ {
"id" : "57e95f47-feb4-4328-88a6-8c8abde98db9",
"type" : "password",
"createdDate" : 1677187934455,
"secretData" : "{\"value\":\"2JMhNDo3jhT8M5w38JLVHiAN/njcXc6moaa9d6L0LYe8yOCxoxmVSqejFDQTyESxeMChBU7qj2NXIGhJMIsBiw==\",\"salt\":\"O5NxbiEqrDNzN041mEz/8Q==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "588e69b9-7534-4073-861d-500475b12b24",
"createdTimestamp" : 1675718484566,
@@ -1427,8 +1887,8 @@
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "2a5d7caa-2c3e-4404-a133-ec220c0307db",
"createdTimestamp" : 1676566095780,
"id" : "6bc87bfb-6288-49df-a0f3-51db4e46201b",
"createdTimestamp" : 1677179612799,
"username" : "peopleops.partner2.sme",
"enabled" : true,
"totp" : false,
@@ -1438,10 +1898,10 @@
"spiffworkflow-employeeid" : [ "173" ]
},
"credentials" : [ {
"id" : "64fc835c-b693-4fed-ab9f-952cbaadbbfd",
"id" : "c0c57e55-9d34-499f-80a8-0f0cd639e1ed",
"type" : "password",
"createdDate" : 1676566095815,
"secretData" : "{\"value\":\"w5nUlwlH1Z46WGhfejPIiRW6OkE9bcjHNCVySUDzMIpkbCm3f78XfuvdGSDeCpJ/FQCJuFo5ciDJ7ExXLyLfnQ==\",\"salt\":\"nz1xSxci+NFsyPZPhFDtZQ==\",\"additionalParameters\":{}}",
"createdDate" : 1677179612835,
"secretData" : "{\"value\":\"xUGT/9b0xVMemt7C30eO/TZfOaf3sO3j/XaADPWV+bXb5yNt0Dc6Ao0KVA0yzrPzCeXVa4C2BlHdXpx4l/nNUw==\",\"salt\":\"7UAhQDr50I44pVegqsm4aw==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
@@ -1450,8 +1910,8 @@
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "2df3aa5e-5e5b-4c4a-b9bc-3a916c651632",
"createdTimestamp" : 1676566095846,
"id" : "f8837814-21dc-475e-9067-41d1da670fff",
"createdTimestamp" : 1677179612959,
"username" : "peopleops.partner3.sme",
"enabled" : true,
"totp" : false,
@@ -1461,10 +1921,56 @@
"spiffworkflow-employeeid" : [ "174" ]
},
"credentials" : [ {
"id" : "efaaec98-45c7-45cc-b4a4-32708882b72f",
"id" : "d83f8952-b7b7-4860-9af9-b697a84da13a",
"type" : "password",
"createdDate" : 1676566095880,
"secretData" : "{\"value\":\"B9M+AGxXUX4/+ce0y6AgFBm4F7phl5+6zToumcfheXglqcag2jr7iqLTtvwVkz3w8x7rmxUrzs7rkJPhK+/Jpg==\",\"salt\":\"rLFkhDJLxRuCNw7PNswlSQ==\",\"additionalParameters\":{}}",
"createdDate" : 1677179612997,
"secretData" : "{\"value\":\"ZBH+k4nUWrpVJoyu4j8nNsYvWMA8fIrS3rxl+Pfi8XUp5QUPxMr2slopxBpdn5rCFxC422rGvE76z59+lsGHFw==\",\"salt\":\"AGjic4GY4x47sB0STHebYw==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "44c5d69d-f767-4f11-8d0b-8b6d42cfb1da",
"createdTimestamp" : 1677181799109,
"username" : "peopleops.partner4.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "peopleops.partner4.sme@status.im",
"attributes" : {
"spiffworkflow-employeeid" : [ "181" ]
},
"credentials" : [ {
"id" : "eeb6aa42-0141-4a0e-9135-22e519fe2259",
"type" : "password",
"createdDate" : 1677181799173,
"secretData" : "{\"value\":\"hRXbF8Hv5ZbrLFXr2ceYHva6LV9Nl8R4rWzigTLPkkxKeF87iaifmStRxSWdJv4LZsq4+qwJF3wretnaav6VUw==\",\"salt\":\"ho19cRuxsUuCF5fVo2/fSw==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "abed16ea-ffb1-4ca4-a907-206f56d0c6d1",
"createdTimestamp" : 1677181799452,
"username" : "peopleops.partner5.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "peopleops.partner5.sme@status.im",
"attributes" : {
"spiffworkflow-employeeid" : [ "182" ]
},
"credentials" : [ {
"id" : "f07e520a-b3eb-4b1e-95b3-51c64902dd7b",
"type" : "password",
"createdDate" : 1677181799489,
"secretData" : "{\"value\":\"F2Nr7V6xjBFXI8Siw6rLYAN3ToHKkcq8PLU4SI+T7M4Oj6no1Jf9jtT+pqvQV65GNJ9p1F5U023EENnITa6r+g==\",\"salt\":\"oz69O4w8vVKgjtm2hEglmA==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
@@ -1702,6 +2208,52 @@
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "8495cf5a-d592-4ef4-a25d-b7ab50e4682d",
"createdTimestamp" : 1677300032228,
"username" : "ppg.ba4.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "ppg.ba4.sme@status.im",
"attributes" : {
"spiffworkflow-employeeid" : [ "200" ]
},
"credentials" : [ {
"id" : "690a07af-b356-4021-b012-dc28a52744f7",
"type" : "password",
"createdDate" : 1677300032281,
"secretData" : "{\"value\":\"cRjSpQ9plAFY3XMwDnBXG3uvc6GLnczJuC8b5er7XMy58CpryiRNmi4nzbQNw0IIbvpdcjCTETfMIDMapobXnw==\",\"salt\":\"P9SaAzdcGV4a4Rc57ki8OQ==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "31143c6e-5ea0-4c84-a94c-0215e96226d2",
"createdTimestamp" : 1677300032328,
"username" : "ppg.ba5.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "ppg.ba5.sme@status.im",
"attributes" : {
"spiffworkflow-employeeid" : [ "201" ]
},
"credentials" : [ {
"id" : "6dc24a43-d541-4af5-9514-647a54ac09ee",
"type" : "password",
"createdDate" : 1677300032367,
"secretData" : "{\"value\":\"EAPcqH2t4w066csArNPWxT0pUKMR/RwDAYLdug9PPcmg4BFc71X3w+RXrXhNfcpDz8kTo/BMmjaxyVLDZGGODg==\",\"salt\":\"O+M+MVp1ETT3wyviAeUJnw==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "f56fe387-d153-42c2-880a-6726bd624bae",
"createdTimestamp" : 1676302144802,
@@ -1840,6 +2392,52 @@
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "1b2dc2b1-9706-4b69-aba8-088551d56622",
"createdTimestamp" : 1677181798799,
"username" : "security4.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "security4.sme@status.im",
"attributes" : {
"spiffworkflow-employeeid" : [ "179" ]
},
"credentials" : [ {
"id" : "4b764d7f-8c3b-4978-93aa-a2dbe0caf71c",
"type" : "password",
"createdDate" : 1677181798833,
"secretData" : "{\"value\":\"kn+VDn4d6qlJBJdhLYuJq4/97vfmZmiL3WXmW1OnhzYYv35splfBEkY12j0R4pxZeZ1OWBR7MJs1kB8AeC9cKQ==\",\"salt\":\"K+0rpb4TJ7J6z0F99AAklA==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "f76b4ac7-3beb-4465-ad8d-0d4a513782b4",
"createdTimestamp" : 1677181798958,
"username" : "security5.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "security5.sme@status.im",
"attributes" : {
"spiffworkflow-employeeid" : [ "180" ]
},
"credentials" : [ {
"id" : "3c5493c3-f689-44b1-ae51-94e7d0dff4a0",
"type" : "password",
"createdDate" : 1677181798992,
"secretData" : "{\"value\":\"7kr/Rt3nzDMDky8SBKOro3+sbpcDe6XBemF2CGN2NrBaNPdR+BlH9cpHPlxaTGTcwYe0TbNJo9xQ3FQu7NUwJg==\",\"salt\":\"W/jkh3VF9L05hyGNzHR9Bw==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "b768e3ef-f905-4493-976c-bc3408c04bec",
"createdTimestamp" : 1675447832524,
@ -3175,7 +3773,7 @@
"subType" : "authenticated",
"subComponents" : { },
"config" : {
"allowed-protocol-mapper-types" : [ "oidc-usermodel-attribute-mapper", "oidc-address-mapper", "oidc-full-name-mapper", "saml-user-property-mapper", "saml-user-attribute-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper", "oidc-sha256-pairwise-sub-mapper" ]
"allowed-protocol-mapper-types" : [ "oidc-address-mapper", "oidc-usermodel-property-mapper", "oidc-usermodel-attribute-mapper", "oidc-full-name-mapper", "saml-user-property-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-role-list-mapper", "saml-user-attribute-mapper" ]
}
}, {
"id" : "d68e938d-dde6-47d9-bdc8-8e8523eb08cd",
@ -3193,7 +3791,7 @@
"subType" : "anonymous",
"subComponents" : { },
"config" : {
"allowed-protocol-mapper-types" : [ "oidc-usermodel-attribute-mapper", "saml-user-attribute-mapper", "oidc-address-mapper", "saml-user-property-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-role-list-mapper", "oidc-usermodel-property-mapper", "oidc-full-name-mapper" ]
"allowed-protocol-mapper-types" : [ "oidc-usermodel-property-mapper", "oidc-usermodel-attribute-mapper", "oidc-full-name-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-address-mapper", "saml-role-list-mapper", "saml-user-property-mapper", "saml-user-attribute-mapper" ]
}
}, {
"id" : "3854361d-3fe5-47fb-9417-a99592e3dc5c",
@ -3283,7 +3881,7 @@
"internationalizationEnabled" : false,
"supportedLocales" : [ ],
"authenticationFlows" : [ {
"id" : "01b4b17c-bb82-41c3-b5b5-b9aadd21cb23",
"id" : "0e6ef523-0828-4847-9646-37c2833ad205",
"alias" : "Account verification options",
"description" : "Method with which to verity the existing account",
"providerId" : "basic-flow",
@ -3305,7 +3903,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "57574e2d-3c3d-4286-9fd1-d7f4ab86c6c1",
"id" : "7edc2f58-0e95-4374-b49c-8589b0a7ee64",
"alias" : "Authentication Options",
"description" : "Authentication options.",
"providerId" : "basic-flow",
@ -3334,7 +3932,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "1eb0e67c-2856-475e-8563-5eca431fd9d0",
"id" : "a4ad982f-def5-4845-840d-971205cae536",
"alias" : "Browser - Conditional OTP",
"description" : "Flow to determine if the OTP is required for the authentication",
"providerId" : "basic-flow",
@ -3356,7 +3954,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "ff023867-aad5-4d19-a7da-60904727cd77",
"id" : "daa18225-9c2b-47b8-b31f-152cd64f4202",
"alias" : "Direct Grant - Conditional OTP",
"description" : "Flow to determine if the OTP is required for the authentication",
"providerId" : "basic-flow",
@ -3378,7 +3976,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "c4f2f1e4-a32c-4559-9fe3-f88cc6cb63da",
"id" : "113bca83-78e1-4148-9124-27aeb9e278d3",
"alias" : "First broker login - Conditional OTP",
"description" : "Flow to determine if the OTP is required for the authentication",
"providerId" : "basic-flow",
@ -3400,7 +3998,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "bfb28a5f-98d9-4ce0-ae8d-75a7ba1ad331",
"id" : "cd8c8c26-aa53-4cd4-a3e0-74a4a4376a98",
"alias" : "Handle Existing Account",
"description" : "Handle what to do if there is existing account with same email/username like authenticated identity provider",
"providerId" : "basic-flow",
@ -3422,7 +4020,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "8b2075bd-9ad7-44c3-9a06-bc60a13beb7a",
"id" : "12cb511e-64b3-4506-8905-3e5c8f08fad9",
"alias" : "Reset - Conditional OTP",
"description" : "Flow to determine if the OTP should be reset or not. Set to REQUIRED to force.",
"providerId" : "basic-flow",
@ -3444,7 +4042,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "1fdcbed7-e44b-4473-ab7b-25037309660b",
"id" : "89863115-cb99-4fbf-abfe-6a8a404b5148",
"alias" : "User creation or linking",
"description" : "Flow for the existing/non-existing user alternatives",
"providerId" : "basic-flow",
@ -3467,7 +4065,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "2f6e9208-b0e6-4941-9bd5-8f83ebc25b6c",
"id" : "c90e6d81-9306-41d0-8376-8c237b8757c6",
"alias" : "Verify Existing Account by Re-authentication",
"description" : "Reauthentication of existing account",
"providerId" : "basic-flow",
@ -3489,7 +4087,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "f059067e-d626-4be3-868f-4c8780318497",
"id" : "6d13fbf1-ba5d-4246-8085-5997f8d44941",
"alias" : "browser",
"description" : "browser based authentication",
"providerId" : "basic-flow",
@ -3525,7 +4123,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "c35098b5-3785-4f52-90e3-39b8f3841f0c",
"id" : "b68f54f3-6361-4480-82ed-a508be0376c2",
"alias" : "clients",
"description" : "Base authentication for clients",
"providerId" : "client-flow",
@ -3561,7 +4159,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "c78934b6-5386-49e7-89e8-9efe1088f5b2",
"id" : "8260dae3-441c-4d08-b96a-591ea07c10a6",
"alias" : "direct grant",
"description" : "OpenID Connect Resource Owner Grant",
"providerId" : "basic-flow",
@ -3590,7 +4188,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "7a08791f-0c8b-4e11-a588-f5856b75337b",
"id" : "3a101262-fb6e-453a-94a4-9119c12d4577",
"alias" : "docker auth",
"description" : "Used by Docker clients to authenticate against the IDP",
"providerId" : "basic-flow",
@ -3605,7 +4203,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "11e93dce-9673-4c99-ae7a-0edaf1c9b7e4",
"id" : "ef1643ac-cf03-41e8-bd89-659de5288339",
"alias" : "first broker login",
"description" : "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account",
"providerId" : "basic-flow",
@ -3628,7 +4226,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "dbb50df7-ec6e-4a34-97f5-b484f1d8a76c",
"id" : "409616c0-64ab-4a9c-a286-a446ea717b53",
"alias" : "forms",
"description" : "Username, password, otp and other auth forms.",
"providerId" : "basic-flow",
@ -3650,7 +4248,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "d7a3dff9-249b-4811-9f36-b78119a4ce3f",
"id" : "a90dd7dc-f6b6-4cd1-85f4-f5aec95e5c7b",
"alias" : "http challenge",
"description" : "An authentication flow based on challenge-response HTTP Authentication Schemes",
"providerId" : "basic-flow",
@ -3672,7 +4270,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "ed4891ad-657c-45ac-9388-6c50d191124d",
"id" : "aa535b04-a256-4c0a-aad6-aaa6d053f821",
"alias" : "registration",
"description" : "registration flow",
"providerId" : "basic-flow",
@ -3688,7 +4286,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "f7c308b0-58de-4ed2-bf69-394144698e5a",
"id" : "cbaa3dde-4b4b-4344-841f-ba7468734286",
"alias" : "registration form",
"description" : "registration form",
"providerId" : "form-flow",
@ -3724,7 +4322,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "3fb75774-a3a5-4e01-bc4a-4e564451601d",
"id" : "62c55336-4753-4c4e-a4f9-03adb86f253f",
"alias" : "reset credentials",
"description" : "Reset credentials for a user if they forgot their password or something",
"providerId" : "basic-flow",
@ -3760,7 +4358,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "822d5c02-9ab3-4a9b-8fa4-1f020c5ffe08",
"id" : "35366a6a-8669-4110-9c62-a4f195243f2c",
"alias" : "saml ecp",
"description" : "SAML ECP Profile Authentication Flow",
"providerId" : "basic-flow",
@ -3776,13 +4374,13 @@
} ]
} ],
"authenticatorConfig" : [ {
"id" : "0e613377-2aaa-4fed-bb7d-4dea69d5c340",
"id" : "0d2f25a1-c358-4f08-9b44-02559d1d2b5f",
"alias" : "create unique user config",
"config" : {
"require.password.update.after.registration" : "false"
}
}, {
"id" : "ac6b9188-f0ec-48ec-852a-8e3b331b33a6",
"id" : "350789a4-bbaf-4cba-999d-f40f4cc632ea",
"alias" : "review profile config",
"config" : {
"update.profile.on.first.login" : "missing"

View File

@ -1,16 +1,31 @@
email,spiffworkflow-employeeid
# admin@spiffworkflow.org
amir@status.im
app.program.lead@status.im,121
app.program-lead@status.im,121
codex.project-lead@status.im,153
codex.sme@status.im,185
codex1.sme@status.im,186
codex2.sme@status.im,187
codex3.sme@status.im,188
codex4.sme@status.im,189
codex5.sme@status.im,190
core1.contributor@status.im,155
core2.contributor@status.im,156
core3.contributor@status.im,157
core4.contributor@status.im,158
core5.contributor@status.im,159
core6.contributor@status.im,199
core@status.im,113
dao.project.lead@status.im
desktop.program.lead@status.im
desktop.project-lead@status.im,192
desktop.project.lead@status.im
desktop.sme@status.im,193
desktop1.sme@status.im,194
desktop2.sme@status.im,195
desktop3.sme@status.im,196
desktop4.sme@status.im,197
desktop5.sme@status.im,198
fin@status.im,118
finance.lead@status.im,128
finance_user1@status.im
@ -20,6 +35,8 @@ infra.sme@status.im,119
infra1.sme@status.im,131
infra2.sme@status.im,132
infra3.sme@status.im,167
infra4.sme@status.im,175
infra5.sme@status.im,176
jakub@status.im
jarrad@status.im
lead@status.im,114
@ -28,11 +45,16 @@ legal.sme@status.im,125
legal1.sme@status.im,134
legal2.sme@status.im,165
legal3.sme@status.im,166
legal4.sme@status.im,177
legal5.sme@status.im,178
logos.program-lead@status.im,160
manuchehr@status.im,110
peopleops.partner.sme@status.im,148
peopleops.partner1.sme@status.im,149
peopleops.partner2.sme@status.im,173
peopleops.partner3.sme@status.im,174
peopleops.partner4.sme@status.im,181
peopleops.partner5.sme@status.im,182
peopleops.partner@status.im,150
peopleops.project-lead@status.im,147
peopleops.talent.sme@status.im,143
@ -43,6 +65,8 @@ ppg.ba.sme@status.im,138
ppg.ba1.sme@status.im,170
ppg.ba2.sme@status.im,171
ppg.ba3.sme@status.im,172
ppg.ba4.sme@status.im,200
ppg.ba5.sme@status.im,201
ppg.ba@status.im,127
sasha@status.im,112
security.project-lead@status.im,151
@ -50,4 +74,6 @@ security.sme@status.im,123
security1.sme@status.im,135
security2.sme@status.im,168
security3.sme@status.im,169
security4.sme@status.im,179
security5.sme@status.im,180
services.lead@status.im,122
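
This file pairs each test user's email with an optional spiffworkflow-employeeid; lines starting with # are commented-out users. A minimal sketch of loading it, assuming the email,spiffworkflow-employeeid header shown above is always the first row:

import csv
from typing import Optional

def load_user_employee_ids(path: str) -> dict[str, Optional[str]]:
    """Map each email to its spiffworkflow-employeeid, or None when absent."""
    users: dict[str, Optional[str]] = {}
    with open(path, newline="") as f:
        reader = csv.reader(f)
        next(reader)  # skip the header row
        for row in reader:
            if not row or row[0].startswith("#"):
                continue  # blank line or commented-out user
            users[row[0].strip()] = row[1].strip() if len(row) > 1 and row[1] else None
    return users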

File diff suppressed because it is too large

View File

@ -48,7 +48,6 @@ APScheduler = "*"
Jinja2 = "^3.1.2"
RestrictedPython = "^6.0"
Flask-SQLAlchemy = "^3"
orjson = "^3.8.0"
# type hinting stuff
# these need to be in the normal (non dev-dependencies) section

View File

@ -210,7 +210,7 @@ def configure_sentry(app: flask.app.Flask) -> None:
# profiling doesn't work on windows, because of an issue like https://github.com/nvdv/vprof/issues/62
# but also we commented out profiling because it was causing segfaults (i guess it is marked experimental)
profiles_sample_rate = 0 if sys.platform.startswith("win") else 1
# profiles_sample_rate = 0 if sys.platform.startswith("win") else 1
sentry_sdk.init(
dsn=app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_DSN"),
@ -227,6 +227,6 @@ def configure_sentry(app: flask.app.Flask) -> None:
traces_sample_rate=float(sentry_traces_sample_rate),
traces_sampler=traces_sampler,
# The profiles_sample_rate setting is relative to the traces_sample_rate setting.
_experiments={"profiles_sample_rate": profiles_sample_rate},
# _experiments={"profiles_sample_rate": profiles_sample_rate},
before_send=before_send,
)
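
sentry_sdk.init above also receives a traces_sampler callable alongside traces_sample_rate. For reference, such a sampler is handed a sampling_context dict and returns the sample rate for that transaction; the sketch below is an illustration with assumed paths and rates, not this project's actual sampler:

def traces_sampler(sampling_context: dict) -> float:
    """Drop health-check noise, sample everything else at a flat rate."""
    wsgi_environ = sampling_context.get("wsgi_environ") or {}
    path = wsgi_environ.get("PATH_INFO", "")
    if path.startswith("/v1.0/status"):  # assumed health-check route
        return 0.0
    return 0.5  # assumed default rate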

View File

@ -1518,7 +1518,7 @@ paths:
items:
$ref: "#/components/schemas/Task"
/task-data/{modified_process_model_identifier}/{process_instance_id}:
/task-data/{modified_process_model_identifier}/{process_instance_id}/{spiff_step}:
parameters:
- name: modified_process_model_identifier
in: path
@ -1532,32 +1532,24 @@ paths:
description: The unique id of an existing process instance.
schema:
type: integer
- name: all_tasks
in: query
required: false
description: If true, this will return all tasks associated with the process instance and not just user tasks.
schema:
type: boolean
- name: spiff_step
in: query
required: false
in: path
required: true
description: Returns the tasks as they were during this specific step of execution.
schema:
type: integer
get:
operationId: spiffworkflow_backend.routes.tasks_controller.task_data_show
summary: Get task data for a single task in a spiff step.
tags:
- Process Instances
operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_task_list_with_task_data
summary: Returns the list of all user tasks associated with a process instance, including task data
responses:
"200":
description: list of tasks
content:
application/json:
schema:
type: array
items:
$ref: "#/components/schemas/Task"
$ref: "#/components/schemas/Task"
/task-data/{modified_process_model_identifier}/{process_instance_id}/{task_id}:
parameters:
@ -1579,6 +1571,12 @@ paths:
description: The unique id of the task.
schema:
type: string
- name: spiff_step
in: query
required: false
description: If set, will return the tasks as they were during a specific step of execution.
schema:
type: integer
put:
operationId: spiffworkflow_backend.routes.process_api_blueprint.task_data_update
summary: Update the task data for requested instance and task
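
With this change, spiff_step moves from an optional query parameter to a required path segment, so clients address a step directly and get a single Task back. A hedged sketch of calling the new endpoint (the base URL and bearer-token auth are assumptions):

import requests

BASE_URL = "http://localhost:7000/v1.0"  # assumed local backend

def get_task_data(modified_model_id: str, instance_id: int, spiff_step: int, token: str) -> dict:
    """Fetch the task, with its data, as it was at the given spiff step."""
    url = f"{BASE_URL}/task-data/{modified_model_id}/{instance_id}/{spiff_step}"
    response = requests.get(url, headers={"Authorization": f"Bearer {token}"})
    response.raise_for_status()
    return response.json()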

View File

@ -22,78 +22,7 @@ class MultiInstanceType(enum.Enum):
class Task:
"""Task."""
##########################################################################
# Custom properties and validations defined in Camunda form fields #
##########################################################################
# Custom task title
PROP_EXTENSIONS_TITLE = "display_name"
PROP_EXTENSIONS_CLEAR_DATA = "clear_data"
# Field Types
FIELD_TYPE_STRING = "string"
FIELD_TYPE_LONG = "long"
FIELD_TYPE_BOOLEAN = "boolean"
FIELD_TYPE_DATE = "date"
FIELD_TYPE_ENUM = "enum"
FIELD_TYPE_TEXTAREA = "textarea" # textarea: Multiple lines of text
FIELD_TYPE_AUTO_COMPLETE = "autocomplete"
FIELD_TYPE_FILE = "file"
FIELD_TYPE_FILES = "files" # files: Multiple files
FIELD_TYPE_TEL = "tel" # tel: Phone number
FIELD_TYPE_EMAIL = "email" # email: Email address
FIELD_TYPE_URL = "url" # url: Website address
FIELD_PROP_AUTO_COMPLETE_MAX = ( # Not used directly, passed in from the front end.
"autocomplete_num"
)
# Required field
FIELD_CONSTRAINT_REQUIRED = "required"
# Field properties and expressions
FIELD_PROP_REPEAT = "repeat"
FIELD_PROP_READ_ONLY = "read_only"
FIELD_PROP_LDAP_LOOKUP = "ldap.lookup"
FIELD_PROP_READ_ONLY_EXPRESSION = "read_only_expression"
FIELD_PROP_HIDE_EXPRESSION = "hide_expression"
FIELD_PROP_REQUIRED_EXPRESSION = "required_expression"
FIELD_PROP_LABEL_EXPRESSION = "label_expression"
FIELD_PROP_REPEAT_HIDE_EXPRESSION = "repeat_hide_expression"
FIELD_PROP_VALUE_EXPRESSION = "value_expression"
# Enum field options
FIELD_PROP_SPREADSHEET_NAME = "spreadsheet.name"
FIELD_PROP_DATA_NAME = "data.name"
FIELD_PROP_VALUE_COLUMN = "value.column"
FIELD_PROP_LABEL_COLUMN = "label.column"
# Enum field options values pulled from task data
# Group and Repeat functions
FIELD_PROP_GROUP = "group"
FIELD_PROP_REPLEAT = "repeat"
FIELD_PROP_REPLEAT_TITLE = "repeat_title"
FIELD_PROP_REPLEAT_BUTTON = "repeat_button_label"
# File specific field properties
FIELD_PROP_DOC_CODE = "doc_code" # to associate a file upload field with a doc code
FIELD_PROP_FILE_DATA = (  # to associate a bit of data with a specific file upload field.
"file_data"
)
# Additional properties
FIELD_PROP_ENUM_TYPE = "enum_type"
FIELD_PROP_BOOLEAN_TYPE = "boolean_type"
FIELD_PROP_TEXT_AREA_ROWS = "rows"
FIELD_PROP_TEXT_AREA_COLS = "cols"
FIELD_PROP_TEXT_AREA_AUTO = "autosize"
FIELD_PROP_PLACEHOLDER = "placeholder"
FIELD_PROP_DESCRIPTION = "description"
FIELD_PROP_MARKDOWN_DESCRIPTION = "markdown_description"
FIELD_PROP_HELP = "help"
##########################################################################
HUMAN_TASK_TYPES = ["User Task", "Manual Task"]
def __init__(
self,
@ -202,20 +131,6 @@ class Task:
"task_spiff_step": self.task_spiff_step,
}
@classmethod
def valid_property_names(cls) -> list[str]:
"""Valid_property_names."""
return [
value for name, value in vars(cls).items() if name.startswith("FIELD_PROP")
]
@classmethod
def valid_field_types(cls) -> list[str]:
"""Valid_field_types."""
return [
value for name, value in vars(cls).items() if name.startswith("FIELD_TYPE")
]
@classmethod
def task_state_name_to_int(cls, task_state_name: str) -> int:
task_state_integers = {v: k for k, v in TaskStateNames.items()}
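
task_state_name_to_int maps a state name back to SpiffWorkflow's integer code by inverting the TaskStateNames mapping (integer to name) on each call. A standalone sketch of the pattern; the values here are placeholders, not SpiffWorkflow's actual codes:

TaskStateNames = {1: "FUTURE", 2: "WAITING", 4: "READY"}  # placeholder values

def task_state_name_to_int(task_state_name: str) -> int:
    """Invert the int-to-name map, then look the name up."""
    task_state_integers = {v: k for k, v in TaskStateNames.items()}
    return task_state_integers[task_state_name]

assert task_state_name_to_int("READY") == 4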

View File

@ -514,7 +514,6 @@ def process_instance_task_list_without_task_data_for_me(
process_instance,
all_tasks,
spiff_step,
get_task_data=False,
)
@ -531,24 +530,6 @@ def process_instance_task_list_without_task_data(
process_instance,
all_tasks,
spiff_step,
get_task_data=False,
)
def process_instance_task_list_with_task_data(
modified_process_model_identifier: str,
process_instance_id: int,
all_tasks: bool = False,
spiff_step: int = 0,
) -> flask.wrappers.Response:
"""Process_instance_task_list_with_task_data."""
process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
return process_instance_task_list(
modified_process_model_identifier,
process_instance,
all_tasks,
spiff_step,
get_task_data=True,
)
@ -557,7 +538,6 @@ def process_instance_task_list(
process_instance: ProcessInstanceModel,
all_tasks: bool = False,
spiff_step: int = 0,
get_task_data: bool = False,
) -> flask.wrappers.Response:
"""Process_instance_task_list."""
step_detail_query = db.session.query(SpiffStepDetailsModel).filter(
@ -579,25 +559,12 @@ def process_instance_task_list(
subprocess_state_overrides = {}
for step_detail in step_details:
if step_detail.task_id in tasks:
task_data = (
step_detail.task_json["task_data"] | step_detail.task_json["python_env"]
)
if task_data is None:
task_data = {}
tasks[step_detail.task_id]["data"] = task_data
tasks[step_detail.task_id]["state"] = Task.task_state_name_to_int(
step_detail.task_state
)
else:
for subprocess_id, subprocess_info in subprocesses.items():
if step_detail.task_id in subprocess_info["tasks"]:
task_data = (
step_detail.task_json["task_data"]
| step_detail.task_json["python_env"]
)
if task_data is None:
task_data = {}
subprocess_info["tasks"][step_detail.task_id]["data"] = task_data
subprocess_info["tasks"][step_detail.task_id]["state"] = (
Task.task_state_name_to_int(step_detail.task_state)
)
@ -654,8 +621,6 @@ def process_instance_task_list(
calling_subprocess_task_id=calling_subprocess_task_id,
task_spiff_step=task_spiff_step,
)
if get_task_data:
task.data = spiff_task.data
tasks.append(task)
return make_response(jsonify(tasks), 200)
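
Both the branch removed above and the new task_data_show endpoint later in this commit build the effective task data with the PEP 584 dict-union operator (Python 3.9+): task_json["task_data"] | task_json["python_env"]. Keys from the right-hand operand win, so python_env values override the persisted task data:

task_data = {"invoice_id": 42, "amount": 10}
python_env = {"amount": 12, "approved": True}

merged = task_data | python_env  # right-hand side wins on duplicate keys
assert merged == {"invoice_id": 42, "amount": 12, "approved": True}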

View File

@ -36,6 +36,7 @@ from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
from spiffworkflow_backend.models.task import Task
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.routes.process_api_blueprint import (
@ -72,8 +73,6 @@ class ReactJsonSchemaSelectOption(TypedDict):
enum: list[str]
# TODO: see comment for before_request
# @process_api_blueprint.route("/v1.0/tasks", methods=["GET"])
def task_list_my_tasks(
process_instance_id: Optional[int] = None, page: int = 1, per_page: int = 100
) -> flask.wrappers.Response:
@ -108,6 +107,11 @@ def task_list_my_tasks(
_get_potential_owner_usernames(assigned_user)
)
# FIXME: this breaks postgres. Look at commit c147cdb47b1481f094b8c3d82dc502fe961f4977 for
# the postgres fix but it breaks the method for mysql.
# error in postgres:
# psycopg2.errors.GroupingError) column \"process_instance.process_model_identifier\" must
# appear in the GROUP BY clause or be used in an aggregate function
human_tasks = human_task_query.add_columns(
HumanTaskModel.task_id.label("id"), # type: ignore
HumanTaskModel.task_name,
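
The FIXME above is about PostgreSQL enforcing SQL's grouping rule strictly: every selected column must appear in GROUP BY or inside an aggregate, while MySQL can silently pick an arbitrary value. A hedged SQLAlchemy sketch of the failing shape and the portable fix, using the models imported in this file; the query is illustrative, not the method's actual one:

from sqlalchemy import func

# Fails on postgres: process_model_identifier is selected but neither grouped
# nor aggregated, raising the GroupingError quoted in the comment above.
query = (
    db.session.query(
        ProcessInstanceModel.process_model_identifier,
        func.count(HumanTaskModel.id),
    )
    .join(HumanTaskModel)
    .group_by(HumanTaskModel.process_instance_id)
)

# Portable fix: add every non-aggregate selected column to GROUP BY.
query = query.group_by(ProcessInstanceModel.process_model_identifier)
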
@ -171,6 +175,46 @@ def task_list_for_my_groups(
)
def task_data_show(
modified_process_model_identifier: str,
process_instance_id: int,
spiff_step: int = 0,
) -> flask.wrappers.Response:
process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
step_detail = (
db.session.query(SpiffStepDetailsModel)
.filter(
SpiffStepDetailsModel.process_instance_id == process_instance.id,
SpiffStepDetailsModel.spiff_step == spiff_step,
)
.first()
)
if step_detail is None:
raise ApiError(
error_code="spiff_step_for_proces_instance_not_found",
message=(
"The given spiff step for the given process instance could not be"
" found."
),
status_code=400,
)
processor = ProcessInstanceProcessor(process_instance)
spiff_task = processor.__class__.get_task_by_bpmn_identifier(
step_detail.bpmn_task_identifier, processor.bpmn_process_instance
)
task_data = step_detail.task_json["task_data"] | step_detail.task_json["python_env"]
task = ProcessInstanceService.spiff_task_to_api_task(
processor,
spiff_task,
task_spiff_step=spiff_step,
)
task.data = task_data
return make_response(jsonify(task), 200)
def _munge_form_ui_schema_based_on_hidden_fields_in_task_data(task: Task) -> None:
if task.form_ui_schema is None:
task.form_ui_schema = {}

View File

@ -482,11 +482,6 @@ class AuthorizationService:
"""Profile, picture, website, gender, birthdate, zoneinfo, locale, and updated_at. """
"""Email."""
is_new_user = False
user_model = (
UserModel.query.filter(UserModel.service == user_info["iss"])
.filter(UserModel.service_id == user_info["sub"])
.first()
)
user_attributes = {}
if "email" in user_info:
@ -515,6 +510,13 @@ class AuthorizationService:
tenant_specific_field
]
# example value for service: http://localhost:7002/realms/spiffworkflow (keycloak url)
user_model = (
UserModel.query.filter(UserModel.service == user_attributes["service"])
.filter(UserModel.username == user_attributes["username"])
.first()
)
if user_model is None:
current_app.logger.debug("create_user in login_return")
is_new_user = True

View File

@ -10,6 +10,7 @@ from flask.app import Flask
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel
from spiffworkflow_backend.models.task import Task
# flask logging formats:
@ -218,9 +219,13 @@ class DBHandler(logging.Handler):
bpmn_task_type = record.task_type if hasattr(record, "task_type") else None # type: ignore
timestamp = record.created
message = record.msg if hasattr(record, "msg") else None
current_user_id = (
record.current_user_id if hasattr(record, "current_user_id") else None # type: ignore
)
current_user_id = None
if bpmn_task_type in Task.HUMAN_TASK_TYPES and hasattr(
record, "current_user_id"
):
current_user_id = record.current_user_id # type: ignore
spiff_step = (
record.spiff_step # type: ignore
if hasattr(record, "spiff_step") and record.spiff_step is not None # type: ignore
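
The hasattr guards in this handler are the long form of getattr with a default; the new human-task check could equivalently read as below (Task.HUMAN_TASK_TYPES is the constant kept on the Task model earlier in this commit):

current_user_id = None
if bpmn_task_type in Task.HUMAN_TASK_TYPES:
    # getattr with a default collapses the hasattr-then-read pair
    current_user_id = getattr(record, "current_user_id", None)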

View File

@ -444,53 +444,6 @@ class ProcessInstanceProcessor:
) = ProcessInstanceProcessor.get_process_model_and_subprocesses(
process_instance_model.process_model_identifier
)
else:
bpmn_json_length = len(process_instance_model.bpmn_json.encode("utf-8"))
megabyte = float(1024**2)
json_size = bpmn_json_length / megabyte
if json_size > 1:
wf_json = json.loads(process_instance_model.bpmn_json)
if "spec" in wf_json and "tasks" in wf_json:
task_tree = wf_json["tasks"]
test_spec = wf_json["spec"]
task_size = "{:.2f}".format(
len(json.dumps(task_tree).encode("utf-8")) / megabyte
)
spec_size = "{:.2f}".format(
len(json.dumps(test_spec).encode("utf-8")) / megabyte
)
message = (
"Workflow "
+ process_instance_model.process_model_identifier
+ f" JSON Size is over 1MB:{json_size:.2f} MB"
)
message += f"\n Task Size: {task_size}"
message += f"\n Spec Size: {spec_size}"
current_app.logger.warning(message)
def check_sub_specs(
test_spec: dict, indent: int = 0, show_all: bool = False
) -> None:
"""Check_sub_specs."""
for my_spec_name in test_spec["task_specs"]:
my_spec = test_spec["task_specs"][my_spec_name]
my_spec_size = (
len(json.dumps(my_spec).encode("utf-8")) / megabyte
)
if my_spec_size > 0.1 or show_all:
current_app.logger.warning(
(" " * indent)
+ "Sub-Spec "
+ my_spec["name"]
+ " :"
+ f"{my_spec_size:.2f}"
)
if "spec" in my_spec:
if my_spec["name"] == "Call_Emails_Process_Email":
pass
check_sub_specs(my_spec["spec"], indent + 5)
check_sub_specs(test_spec, 5)
self.process_model_identifier = process_instance_model.process_model_identifier
self.process_model_display_name = (
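
The block deleted above warned when a process instance's serialized BPMN JSON crossed 1 MB and walked sub-specs to find the heavy ones. The measurement it was built on reduces to a small helper:

import json

def json_size_mb(obj: object) -> float:
    """Size of the UTF-8 encoded JSON serialization, in megabytes."""
    return len(json.dumps(obj).encode("utf-8")) / float(1024**2)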

View File

@ -235,23 +235,6 @@ class ProcessInstanceService:
# maybe move this out once we have the interstitial page, since this is here just so we can get the next human task
processor.do_engine_steps(save=True)
@staticmethod
def extract_form_data(latest_data: dict, task: SpiffTask) -> dict:
"""Extracts data from the latest_data that is directly related to the form that is being submitted."""
data = {}
if hasattr(task.task_spec, "form"):
for field in task.task_spec.form.fields:
if field.has_property(Task.FIELD_PROP_REPEAT):
group = field.get_property(Task.FIELD_PROP_REPEAT)
if group in latest_data:
data[group] = latest_data[group]
else:
value = ProcessInstanceService.get_dot_value(field.id, latest_data)
if value is not None:
ProcessInstanceService.set_dot_value(field.id, value, data)
return data
@staticmethod
def create_dot_dict(data: dict) -> dict[str, Any]:
"""Create_dot_dict."""

View File

@ -2377,6 +2377,10 @@ class TestProcessApi(BaseTest):
# + 2 - two messages logged for the API calls used to create the processes.
assert len(response.json["results"]) == 6
@pytest.mark.skipif(
os.environ.get("SPIFFWORKFLOW_BACKEND_DATABASE_TYPE") == "postgres",
reason="look at comment in tasks_controller method task_list_my_tasks",
)
def test_correct_user_can_get_and_update_a_task(
self,
app: Flask,
@ -2788,12 +2792,18 @@ class TestProcessApi(BaseTest):
assert response.json["status"] == "complete"
response = client.get(
f"/v1.0/task-data/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}?all_tasks=true",
f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/task-info?all_tasks=true",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 200
end = next(task for task in response.json if task["type"] == "End Event")
assert end["data"]["result"] == {"message": "message 1"}
end_task = next(task for task in response.json if task["type"] == "End Event")
response = client.get(
f"/v1.0/task-data/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/{end_task['task_spiff_step']}",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 200
task = response.json
assert task["data"]["result"] == {"message": "message 1"}
def test_manual_complete_task(
self,
@ -2854,7 +2864,7 @@ class TestProcessApi(BaseTest):
)
response = client.get(
f"/v1.0/task-data/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}",
f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/task-info",
headers=self.logged_in_headers(with_super_admin_user),
)
assert len(response.json) == 1

View File

@ -129,6 +129,8 @@ class TestMessageService(BaseTest):
MessageInstanceModel.query.filter_by(message_type="receive")
.filter_by(status="ready")
.filter_by(process_instance_id=self.process_instance.id)
.order_by(
MessageInstanceModel.id)
.all()
)
assert len(waiting_messages) == 0
@ -143,6 +145,8 @@ class TestMessageService(BaseTest):
# The message receiver process is also complete
message_receiver_process = ProcessInstanceModel.query.filter_by(
process_model_identifier="test_group/message_receive"
).order_by(
ProcessInstanceModel.id
).first()
assert message_receiver_process.status == "complete"
@ -186,6 +190,7 @@ class TestMessageService(BaseTest):
send_messages = (
MessageInstanceModel.query.filter_by(message_type="send")
.filter_by(process_instance_id=self.process_instance.id)
.order_by(MessageInstanceModel.id)
.all()
)
assert len(send_messages) == 1
@ -202,6 +207,7 @@ class TestMessageService(BaseTest):
MessageInstanceModel.query.filter_by(message_type="receive")
.filter_by(status="ready")
.filter_by(process_instance_id=self.process_instance.id)
.order_by(MessageInstanceModel.id)
.all()
)
assert len(waiting_messages) == 1
@ -287,11 +293,11 @@ class TestMessageService(BaseTest):
assert len(process_instance_result) == 3
process_instance_receiver_one = ProcessInstanceModel.query.filter_by(
process_model_identifier="test_group/message_receiver_one"
).first()
).order_by(ProcessInstanceModel.id).first()
assert process_instance_receiver_one is not None
process_instance_receiver_two = ProcessInstanceModel.query.filter_by(
process_model_identifier="test_group/message_receiver_two"
).first()
).order_by(ProcessInstanceModel.id).first()
assert process_instance_receiver_two is not None
# just make sure it's a different process instance
@ -308,7 +314,9 @@ class TestMessageService(BaseTest):
assert process_instance_receiver_two.id != process_instance_sender.id
assert process_instance_receiver_two.status == "complete"
message_instance_result = MessageInstanceModel.query.all()
message_instance_result = MessageInstanceModel.query.order_by(
MessageInstanceModel.id
).all()
assert len(message_instance_result) == 7
message_instance_receiver_one = [
@ -330,12 +338,16 @@ class TestMessageService(BaseTest):
# more messages that need to be picked up.
MessageService.correlate_all_message_instances()
message_instance_result = MessageInstanceModel.query.all()
message_instance_result = MessageInstanceModel.query.order_by(
MessageInstanceModel.id
).all()
assert len(message_instance_result) == 8
for message_instance in message_instance_result:
assert message_instance.status == "completed"
process_instance_result = ProcessInstanceModel.query.all()
process_instance_result = ProcessInstanceModel.query.order_by(
ProcessInstanceModel.id
).all()
assert len(process_instance_result) == 3
for process_instance in process_instance_result:
assert process_instance.status == "complete"
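
The order_by calls added throughout these tests pin down row order, which SQL leaves undefined without an ORDER BY; calling .first() on an unordered query can return different rows on MySQL than on PostgreSQL. The pattern in one place, assuming the models imported by this test module:

instance = (
    ProcessInstanceModel.query
    .filter_by(process_model_identifier="test_group/message_receive")
    .order_by(ProcessInstanceModel.id)  # deterministic across database backends
    .first()
)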

View File

@ -17,7 +17,7 @@ export const useUriListForPermissions = () => {
processInstanceResumePath: `/v1.0/process-instance-resume/${params.process_model_id}/${params.process_instance_id}`,
processInstanceSuspendPath: `/v1.0/process-instance-suspend/${params.process_model_id}/${params.process_instance_id}`,
processInstanceResetPath: `/v1.0/process-instance-reset/${params.process_model_id}/${params.process_instance_id}`,
processInstanceTaskListDataPath: `/v1.0/task-data/${params.process_model_id}/${params.process_instance_id}`,
processInstanceTaskDataPath: `/v1.0/task-data/${params.process_model_id}/${params.process_instance_id}`,
processInstanceSendEventPath: `/v1.0/send-event/${params.process_model_id}/${params.process_instance_id}`,
processInstanceCompleteTaskPath: `/v1.0/complete-task/${params.process_model_id}/${params.process_instance_id}`,
processInstanceTaskListPath: `/v1.0/process-instances/${params.process_model_id}/${params.process_instance_id}/task-info`,

View File

@ -10,6 +10,7 @@ import {
} from '../helpers';
import HttpService from '../services/HttpService';
import { useUriListForPermissions } from '../hooks/UriListForPermissions';
import UserService from '../services/UserService';
type OwnProps = {
variant: string;
@ -28,6 +29,8 @@ export default function ProcessInstanceLogList({ variant }: OwnProps) {
processInstanceShowPageBaseUrl = `/admin/process-instances/${params.process_model_id}`;
}
const userEmail = UserService.getUserEmail();
useEffect(() => {
const setProcessInstanceLogListFromResult = (result: any) => {
setProcessInstanceLogs(result.results);
@ -45,56 +48,91 @@ export default function ProcessInstanceLogList({ variant }: OwnProps) {
isDetailedView,
]);
const getTableRow = (row: any) => {
const tableRow = [];
const taskNameCell = (
<td>
{row.bpmn_task_name ||
(row.bpmn_task_type === 'Default Start Event'
? 'Process Started'
: '') ||
(row.bpmn_task_type === 'End Event' ? 'Process Ended' : '')}
</td>
);
if (isDetailedView) {
tableRow.push(
<>
<td data-qa="paginated-entity-id">{row.id}</td>
<td>{row.bpmn_process_identifier}</td>
{taskNameCell}
</>
);
} else {
tableRow.push(
<>
{taskNameCell}
<td>{row.bpmn_process_identifier}</td>
</>
);
}
if (isDetailedView) {
tableRow.push(
<>
<td>{row.bpmn_task_type}</td>
<td>{row.message}</td>
<td>{row.username === userEmail ? 'me 🔥' : row.username}</td>
</>
);
}
tableRow.push(
<td>
<Link
data-qa="process-instance-show-link"
to={`${processInstanceShowPageBaseUrl}/${row.process_instance_id}/${row.spiff_step}`}
>
{convertSecondsToFormattedDateTime(row.timestamp)}
</Link>
</td>
);
return <tr key={row.id}>{tableRow}</tr>;
};
const buildTable = () => {
const rows = processInstanceLogs.map((row) => {
const rowToUse = row as any;
return (
<tr key={rowToUse.id}>
<td data-qa="paginated-entity-id">{rowToUse.id}</td>
<td>
{rowToUse.bpmn_task_name ||
(rowToUse.bpmn_task_type === 'Default Start Event'
? 'Process Started'
: '') ||
(rowToUse.bpmn_task_type === 'End Event' ? 'Process Ended' : '')}
</td>
{isDetailedView && (
<>
<td>{rowToUse.message}</td>
<td>{rowToUse.bpmn_task_identifier}</td>
<td>{rowToUse.bpmn_task_type}</td>
</>
)}
<td>{rowToUse.bpmn_process_identifier}</td>
<td>{rowToUse.username}</td>
<td>
<Link
data-qa="process-instance-show-link"
to={`${processInstanceShowPageBaseUrl}/${rowToUse.process_instance_id}/${rowToUse.spiff_step}`}
>
{convertSecondsToFormattedDateTime(rowToUse.timestamp)}
</Link>
</td>
</tr>
);
return getTableRow(row);
});
const tableHeaders = [];
if (isDetailedView) {
tableHeaders.push(
<>
<th>Id</th>
<th>Bpmn Process</th>
<th>Task Name</th>
</>
);
} else {
tableHeaders.push(
<>
<th>Event</th>
<th>Bpmn Process</th>
</>
);
}
if (isDetailedView) {
tableHeaders.push(
<>
<th>Task Type</th>
<th>Message</th>
<th>User</th>
</>
);
}
tableHeaders.push(<th>Timestamp</th>);
return (
<Table size="lg">
<thead>
<tr>
<th>Id</th>
<th>Task Name</th>
{isDetailedView && (
<>
<th>Message</th>
<th>Task Identifier</th>
<th>Task Type</th>
</>
)}
<th>Bpmn Process Identifier</th>
<th>User</th>
<th>Timestamp</th>
</tr>
<tr>{tableHeaders}</tr>
</thead>
<tbody>{rows}</tbody>
</Table>

View File

@ -27,6 +27,7 @@ import {
Modal,
Dropdown,
Stack,
Loading,
// @ts-ignore
} from '@carbon/react';
import { Can } from '@casl/react';
@ -65,8 +66,12 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
useState<ProcessInstance | null>(null);
const [tasks, setTasks] = useState<ProcessInstanceTask[] | null>(null);
const [tasksCallHadError, setTasksCallHadError] = useState<boolean>(false);
const [taskToDisplay, setTaskToDisplay] = useState<object | null>(null);
const [taskToDisplay, setTaskToDisplay] =
useState<ProcessInstanceTask | null>(null);
const [taskDataToDisplay, setTaskDataToDisplay] = useState<string>('');
const [showTaskDataLoading, setShowTaskDataLoading] =
useState<boolean>(false);
const [processDataToDisplay, setProcessDataToDisplay] =
useState<ProcessData | null>(null);
const [editingTaskData, setEditingTaskData] = useState<boolean>(false);
@ -99,7 +104,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
[targetUris.messageInstanceListPath]: ['GET'],
[targetUris.processInstanceActionPath]: ['DELETE'],
[targetUris.processInstanceLogListPath]: ['GET'],
[targetUris.processInstanceTaskListDataPath]: ['GET', 'PUT'],
[targetUris.processInstanceTaskDataPath]: ['GET', 'PUT'],
[targetUris.processInstanceSendEventPath]: ['POST'],
[targetUris.processInstanceCompleteTaskPath]: ['POST'],
[targetUris.processModelShowPath]: ['PUT'],
@ -145,9 +150,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
taskParams = `${taskParams}&spiff_step=${params.spiff_step}`;
}
let taskPath = '';
if (ability.can('GET', targetUris.processInstanceTaskListDataPath)) {
taskPath = `${targetUris.processInstanceTaskListDataPath}${taskParams}`;
} else if (ability.can('GET', taskListPath)) {
if (ability.can('GET', taskListPath)) {
taskPath = `${taskListPath}${taskParams}`;
}
if (taskPath) {
@ -557,11 +560,33 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
return <div />;
};
const initializeTaskDataToDisplay = (task: any) => {
if (task == null) {
const processTaskResult = (result: ProcessInstanceTask) => {
if (result == null) {
setTaskDataToDisplay('');
} else {
setTaskDataToDisplay(JSON.stringify(task.data, null, 2));
setTaskDataToDisplay(JSON.stringify(result.data, null, 2));
}
setShowTaskDataLoading(false);
};
const initializeTaskDataToDisplay = (task: ProcessInstanceTask | null) => {
if (
task &&
task.state === 'COMPLETED' &&
ability.can('GET', targetUris.processInstanceTaskDataPath)
) {
setShowTaskDataLoading(true);
HttpService.makeCallToBackend({
path: `${targetUris.processInstanceTaskDataPath}/${task.task_spiff_step}`,
httpMethod: 'GET',
successCallback: processTaskResult,
failureCallback: (error: any) => {
setTaskDataToDisplay(`ERROR: ${error.message}`);
setShowTaskDataLoading(false);
},
});
} else {
setTaskDataToDisplay('');
}
};
@ -668,7 +693,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
const canEditTaskData = (task: any) => {
return (
processInstance &&
ability.can('PUT', targetUris.processInstanceTaskListDataPath) &&
ability.can('PUT', targetUris.processInstanceTaskDataPath) &&
isCurrentTask(task) &&
processInstance.status === 'suspended' &&
showingLastSpiffStep()
@ -742,8 +767,13 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
const saveTaskDataResult = (_: any) => {
setEditingTaskData(false);
const dataObject = taskDataStringToObject(taskDataToDisplay);
const taskToDisplayCopy = { ...taskToDisplay, data: dataObject }; // spread operator
setTaskToDisplay(taskToDisplayCopy);
if (taskToDisplay) {
const taskToDisplayCopy: ProcessInstanceTask = {
...taskToDisplay,
data: dataObject,
}; // spread operator
setTaskToDisplay(taskToDisplayCopy);
}
refreshPage();
};
@ -757,7 +787,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
// taskToUse is copy of taskToDisplay, with taskDataToDisplay in data attribute
const taskToUse: any = { ...taskToDisplay, data: taskDataToDisplay };
HttpService.makeCallToBackend({
path: `${targetUris.processInstanceTaskListDataPath}/${taskToUse.id}`,
path: `${targetUris.processInstanceTaskDataPath}/${taskToUse.id}`,
httpMethod: 'PUT',
successCallback: saveTaskDataResult,
failureCallback: addError,
@ -901,6 +931,10 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
};
const taskDataContainer = () => {
let taskDataClassName = '';
if (taskDataToDisplay.startsWith('ERROR:')) {
taskDataClassName = 'failure-string';
}
return editingTaskData ? (
<Editor
height={600}
@ -910,7 +944,12 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
onChange={(value) => setTaskDataToDisplay(value || '')}
/>
) : (
<pre>{taskDataToDisplay}</pre>
<>
{showTaskDataLoading ? (
<Loading className="some-class" withOverlay={false} small />
) : null}
<pre className={taskDataClassName}>{taskDataToDisplay}</pre>
</>
);
};

View File

@ -32,10 +32,10 @@ export default function TaskShow() {
useEffect(() => {
const processResult = (result: ProcessInstanceTask) => {
setTask(result);
const url = `/task-data/${modifyProcessIdentifierForPathParam(
const url = `/v1.0/process-instances/for-me/${modifyProcessIdentifierForPathParam(
result.process_model_identifier
)}/${params.process_instance_id}`;
// if user is unauthorized to get task-data then don't do anything
)}/${params.process_instance_id}/task-info`;
// if user is unauthorized to get process-instance task-info then don't do anything
// Checking like this so we can dynamically create the url with the correct process model
// instead of passing the process model identifier in through the params
HttpService.makeCallToBackend({