Merge branch 'main' into feature/create_containers

This commit is contained in: commit 0208f89e2e

@@ -1,4 +1,4 @@
-name: Tests
+name: Backend Tests
 
 on:
 - push
@@ -16,7 +16,10 @@ jobs:
       fail-fast: false
       matrix:
         include:
-          - { python: "3.11", os: "ubuntu-latest", session: "safety" }
+          # FIXME: https://github.com/mysql/mysql-connector-python/pull/86
+          # put back when poetry update protobuf mysql-connector-python updates protobuf
+          # right now mysql is forcing protobuf to version 3
+          # - { python: "3.11", os: "ubuntu-latest", session: "safety" }
           - { python: "3.11", os: "ubuntu-latest", session: "mypy" }
           - { python: "3.10", os: "ubuntu-latest", session: "mypy" }
           - { python: "3.9", os: "ubuntu-latest", session: "mypy" }
|
@@ -280,7 +283,7 @@ jobs:
         # so just skip everything but main
         if: github.ref_name == 'main'
         with:
-          projectBaseDir: spiffworkflow-backend
+          projectBaseDir: spiffworkflow-frontend
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
           SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}

@@ -0,0 +1,128 @@
name: Frontend Tests

on:
- push
- pull_request

defaults:
  run:
    working-directory: spiffworkflow-frontend

# https://docs.github.com/en/actions/using-workflows/reusing-workflows

jobs:
  tests:
    runs-on: ubuntu-latest
    steps:
      - name: Development Code
        uses: actions/checkout@v3
        with:
          # Disabling shallow clone is recommended for improving relevancy of reporting in sonarcloud
          fetch-depth: 0
      - name: Setup Node
        uses: actions/setup-node@v3
        with:
          node-version: 18.x
      - run: npm install
      - run: npm run lint
      - run: npm test
      - run: npm run build --if-present
      - name: SonarCloud Scan
        # thought about just skipping dependabot
        # if: ${{ github.actor != 'dependabot[bot]' }}
        # but figured all pull requests seems better, since none of them will have access to sonarcloud.
        # however, with just skipping pull requests, the build associated with "Triggered via push" is also associated with the pull request and also fails hitting sonarcloud
        # if: ${{ github.event_name != 'pull_request' }}
        # so just skip everything but main
        if: github.ref_name == 'main'
        uses: sonarsource/sonarcloud-github-action@master
        with:
          projectBaseDir: spiffworkflow-frontend
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
      # part about saving PR number and then using it from auto-merge-dependabot-prs from:
      # https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_run
      - name: Save PR number
        if: ${{ github.event_name == 'pull_request' }}
        env:
          PR_NUMBER: ${{ github.event.number }}
        run: |
          mkdir -p ./pr
          echo "$PR_NUMBER" > ./pr/pr_number
      - uses: actions/upload-artifact@v3
        with:
          name: pr_number
          path: pr/

  cypress-run:
    runs-on: ubuntu-20.04
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Checkout Samples
        uses: actions/checkout@v3
        with:
          repository: sartography/sample-process-models
          path: sample-process-models
      - name: start_keycloak
        working-directory: ./spiffworkflow-backend
        run: ./bin/start_keycloak
      - name: start_backend
        working-directory: ./spiffworkflow-backend
        run: ./bin/build_and_run_with_docker_compose
        timeout-minutes: 20
        env:
          SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA: "true"
          SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME: "acceptance_tests.yml"
      - name: start_frontend
        # working-directory: ./spiffworkflow-frontend
        run: ./bin/build_and_run_with_docker_compose
      - name: wait_for_backend
        working-directory: ./spiffworkflow-backend
        run: ./bin/wait_for_server_to_be_up 5
      - name: wait_for_frontend
        # working-directory: ./spiffworkflow-frontend
        run: ./bin/wait_for_frontend_to_be_up 5
      - name: wait_for_keycloak
        working-directory: ./spiffworkflow-backend
        run: ./bin/wait_for_keycloak 5
      - name: Cypress run
        uses: cypress-io/github-action@v4
        with:
          working-directory: ./spiffworkflow-frontend
          browser: chrome
          # only record on push, not pull_request, since we do not have secrets for PRs,
          # so the required CYPRESS_RECORD_KEY will not be available.
          # we have limited runs in cypress cloud, so only record main builds
          record: ${{ github.ref_name == 'main' && github.event_name == 'push' }}
        env:
          # pass the Dashboard record key as an environment variable
          CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
          # pass GitHub token to allow accurately detecting a build vs a re-run build
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: get_backend_logs_from_docker_compose
        if: failure()
        working-directory: ./spiffworkflow-backend
        run: ./bin/get_logs_from_docker_compose >./log/docker_compose.log
      - name: Upload logs
        if: failure()
        uses: "actions/upload-artifact@v3.0.0"
        with:
          name: spiffworkflow-backend-logs
          path: "./spiffworkflow-backend/log/*.log"

      # https://github.com/cypress-io/github-action#artifacts
      - name: upload_screenshots
        uses: actions/upload-artifact@v2
        if: failure()
        with:
          name: cypress-screenshots
          path: ./spiffworkflow-frontend/cypress/screenshots
      # Test run video was always captured, so this action uses "always()" condition
      - name: upload_videos
        uses: actions/upload-artifact@v2
        if: failure()
        with:
          name: cypress-videos
          path: ./spiffworkflow-frontend/cypress/videos

@@ -8,7 +8,7 @@ project:
 
 # run all lint checks and tests
 
-`./bin/run_pyl`
+./bin/run_pyl
 
 Requires at root:
 - .darglint

@@ -120,6 +120,8 @@ class BpmnParser(object):
         self.process_parsers_by_name = {}
         self.collaborations = {}
         self.process_dependencies = set()
+        self.messages = {}
+        self.correlations = {}
 
     def _get_parser_class(self, tag):
         if tag in self.OVERRIDE_PARSER_CLASSES:
@@ -179,6 +181,8 @@ class BpmnParser(object):
 
         self._add_processes(bpmn, filename)
         self._add_collaborations(bpmn)
+        self._add_messages(bpmn)
+        self._add_correlations(bpmn)
 
     def _add_processes(self, bpmn, filename=None):
         for process in bpmn.xpath('.//bpmn:process', namespaces=self.namespaces):
@@ -192,6 +196,43 @@ class BpmnParser(object):
             name = collaboration.get('id')
             self.collaborations[name] = [ participant.get('processRef') for participant in collaboration_xpath('.//bpmn:participant') ]
 
+    def _add_messages(self, bpmn):
+        for message in bpmn.xpath('.//bpmn:message', namespaces=self.namespaces):
+            if message.attrib.get("id") is None:
+                raise ValidationException(
+                    "Message identifier is missing from bpmn xml"
+                )
+            self.messages[message.attrib.get("id")] = message.attrib.get("name")
+
+    def _add_correlations(self, bpmn):
+        for correlation in bpmn.xpath('.//bpmn:correlationProperty', namespaces=self.namespaces):
+            correlation_identifier = correlation.attrib.get("id")
+            if correlation_identifier is None:
+                raise ValidationException(
+                    "Correlation identifier is missing from bpmn xml"
+                )
+            correlation_property_retrieval_expressions = correlation.xpath(
+                "//bpmn:correlationPropertyRetrievalExpression", namespaces=self.namespaces)
+            if not correlation_property_retrieval_expressions:
+                raise ValidationException(
+                    f"Correlation is missing correlation property retrieval expressions: {correlation_identifier}"
+                )
+            retrieval_expressions = []
+            for cpre in correlation_property_retrieval_expressions:
+                message_model_identifier = cpre.attrib.get("messageRef")
+                if message_model_identifier is None:
+                    raise ValidationException(
+                        f"Message identifier is missing from correlation property: {correlation_identifier}"
+                    )
+                children = cpre.getchildren()
+                expression = children[0].text if len(children) > 0 else None
+                retrieval_expressions.append({"messageRef": message_model_identifier,
+                                              "expression": expression})
+            self.correlations[correlation_identifier] = {
+                "name": correlation.attrib.get("name"),
+                "retrieval_expressions": retrieval_expressions
+            }
+
     def _find_dependencies(self, process):
         """Locate all calls to external BPMN, and store their ids in our list of dependencies"""
         for call_activity in process.xpath('.//bpmn:callActivity', namespaces=self.namespaces):
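The two parser hooks added above expose plain dictionaries. A minimal usage sketch, assuming a BPMN file that declares a message and a correlationProperty (the file name and IDs below are illustrative; the shapes match the CollaborationTest assertions further down):

```python
from SpiffWorkflow.bpmn.parser.BpmnParser import BpmnParser

parser = BpmnParser()
parser.add_bpmn_file('collaboration.bpmn')  # hypothetical file with a message + correlationProperty

# message id -> message name
print(parser.messages)      # e.g. {'love_letter': 'Love Letter'}

# correlation id -> name plus one retrieval expression per referenced message
print(parser.correlations)  # e.g. {'lover_name': {'name': "Lover's Name",
                            #        'retrieval_expressions': [{'messageRef': 'love_letter',
                            #                                   'expression': 'lover_name'}]}}
```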
@@ -44,7 +44,7 @@ class ProcessParser(NodeParser):
         self.parsed_nodes = {}
         self.lane = lane
         self.spec = None
-        self.process_executable = True
+        self.process_executable = self.is_executable()
 
     def get_name(self):
         """
@@ -52,6 +52,35 @@ class ProcessParser(NodeParser):
         """
         return self.node.get('name', default=self.get_id())
 
+    def has_lanes(self) -> bool:
+        """Returns true if this process has one or more named lanes"""
+        elements = self.xpath("//bpmn:lane")
+        for el in elements:
+            if el.get("name"):
+                return True
+        return False
+
+    def is_executable(self) -> bool:
+        return self.node.get('isExecutable', 'true') == 'true'
+
+    def start_messages(self):
+        """This returns a list of messages that would cause this
+        process to start."""
+        messages = []
+        message_event_definitions = self.xpath(
+            "//bpmn:startEvent/bpmn:messageEventDefinition")
+        for message_event_definition in message_event_definitions:
+            message_model_identifier = message_event_definition.attrib.get(
+                "messageRef"
+            )
+            if message_model_identifier is None:
+                raise ValidationException(
+                    f"Could not find messageRef from message event definition: {message_event_definition}"
+                )
+            messages.append(message_model_identifier)
+
+        return messages
+
     def parse_node(self, node):
         """
         Parses the specified child task node, and returns the task spec. This
@@ -72,7 +101,6 @@ class ProcessParser(NodeParser):
     def _parse(self):
         # here we only look in the top level. We will have another
         # bpmn:startEvent if we have a subworkflow task
-        self.process_executable = self.node.get('isExecutable', 'true') == 'true'
        start_node_list = self.xpath('./bpmn:startEvent')
        if not start_node_list and self.process_executable:
            raise ValidationException("No start event found", node=self.node, filename=self.filename)
@@ -81,12 +81,6 @@ class MultiInstanceTask(TaskSpec):
 
         TaskSpec.__init__(self, wf_spec, name, **kwargs)
 
-
-    # DO NOT OVERRIDE THE SPEC TYPE.
-    # @property
-    # def spec_type(self):
-    #     return 'MultiInstance Task'
-
     def _find_my_task(self, task):
         for thetask in task.workflow.task_tree:
             if thetask.thread_id != task.thread_id:
@@ -113,17 +107,6 @@ class MultiInstanceTask(TaskSpec):
             new_task.triggered = True
             output._predict(new_task)
 
-    def _check_inputs(self, my_task):
-        if self.collection is None:
-            return
-        # look for variable in context, if we don't find it, default to 1
-        variable = valueof(my_task, self.times, 1)
-        if self.times.name == self.collection.name and type(variable) == type([]):
-            raise WorkflowTaskExecException(my_task,
-                'If we are updating a collection,'
-                ' then the collection must be a '
-                'dictionary.')
-
     def _get_loop_completion(self, my_task):
         if not self.completioncondition == None:
             terminate = my_task.workflow.script_engine.evaluate(my_task, self.completioncondition)
@@ -154,17 +137,6 @@ class MultiInstanceTask(TaskSpec):
             return len(variable.keys())
         return 1  # we shouldn't ever get here, but just in case return a sane value.
 
-    def _get_current_var(self, my_task, pos):
-        variable = valueof(my_task, self.times, 1)
-        if is_number(variable):
-            return pos
-        if isinstance(variable, list) and len(variable) >= pos:
-            return variable[pos - 1]
-        elif isinstance(variable, dict) and len(list(variable.keys())) >= pos:
-            return variable[list(variable.keys())[pos - 1]]
-        else:
-            return pos
-
     def _get_predicted_outputs(self, my_task):
         split_n = self._get_count(my_task)
 
@@ -418,52 +390,60 @@ class MultiInstanceTask(TaskSpec):
         if my_task.task_spec.prevtaskclass in classes.keys() and not terminate:
             super()._on_complete_hook(my_task)
 
-    def _merge_element_variable(self, my_task, collect, runtimes, colvarname):
-        # if we are updating the same collection as was our loopcardinality
-        # then all the keys should be there and we can use the sorted keylist
-        # if not, we use an integer - we should be guaranteed that the
-        # collection is a dictionary
+    def _check_inputs(self, my_task):
+        if self.collection is None:
+            return
+        # look for variable in context, if we don't find it, default to 1
+        variable = valueof(my_task, self.times, 1)
+        if self.times.name == self.collection.name and type(variable) == type([]):
+            raise WorkflowTaskExecException(my_task,
+                'If we are updating a collection, then the collection must be a dictionary.')
+
+    def _get_current_var(self, my_task, pos):
+        variable = valueof(my_task, self.times, 1)
+        if is_number(variable):
+            return pos
+        if isinstance(variable, list) and len(variable) >= pos:
+            return variable[pos - 1]
+        elif isinstance(variable, dict) and len(list(variable.keys())) >= pos:
+            return variable[list(variable.keys())[pos - 1]]
+        else:
+            return pos
+
+    def _merge_element_variable(self, my_task, collect, runtimes):
         if self.collection is not None and self.times.name == self.collection.name:
+            # Update an existing collection (we used the collection as the cardinality)
             keys = list(collect.keys())
             if len(keys) < runtimes:
                 msg = f"There is a mismatch between runtimes and the number " \
                       f"items in the collection, please check for empty " \
                       f"collection {self.collection.name}."
                 raise WorkflowTaskExecException(my_task, msg)
 
             runtimesvar = keys[runtimes - 1]
         else:
+            # Use an integer (for arrays)
             runtimesvar = runtimes
 
         if self.elementVar in my_task.data and isinstance(my_task.data[self.elementVar], dict):
-            collect[str(runtimesvar)] = DeepMerge.merge(collect.get(runtimesvar, {}),
-                                                        copy.copy(my_task.data[self.elementVar]))
+            collect[str(runtimesvar)] = DeepMerge.merge(
+                collect.get(runtimesvar, {}),
+                copy.copy(my_task.data[self.elementVar])
+            )
 
-        my_task.data = DeepMerge.merge(my_task.data,
-                                       gendict(colvarname.split('/'), collect))
-
-    def _update_sibling_data(self, my_task, runtimes, runcount, colvarname, collect):
+    def _update_sibling_data(self, my_task, runtimes, runcount, colvarname, collect):
         if (runtimes < runcount) and not my_task.terminate_current_loop and self.loopTask:
             my_task._set_state(TaskState.READY)
             my_task._set_internal_data(runtimes=runtimes + 1)
             my_task.data[self.elementVar] = self._get_current_var(my_task, runtimes + 1)
-            element_var_data = None
         else:
-            # The element var data should not be passed on to children
-            # but will add this back onto this task later.
-            element_var_data = my_task.data.pop(self.elementVar, None)
+            my_task.data.pop(self.elementVar, None)
 
-        # if this is a parallel mi - then update all siblings with the
-        # current data
-        if not self.isSequential:
-            for task in my_task.parent.children:
-                task.data = DeepMerge.merge(
-                    task.data,
-                    gendict(colvarname.split('/'),
-                            collect)
-                )
-        return element_var_data
+        for task in my_task.parent.children:
+            task.data = DeepMerge.merge(
+                task.data,
+                gendict(colvarname.split('/'), collect)
+            )
 
     def _on_complete_hook(self, my_task):
         # do special stuff for non-user tasks
@@ -486,9 +466,9 @@ class MultiInstanceTask(TaskSpec):
 
         collect = valueof(my_task, self.collection, {})
 
-        self._merge_element_variable(my_task, collect, runtimes, colvarname)
+        self._merge_element_variable(my_task, collect, runtimes)
 
-        element_var_data = self._update_sibling_data(my_task, runtimes, runcount, colvarname, collect)
+        self._update_sibling_data(my_task, runtimes, runcount, colvarname, collect)
 
         # please see MultiInstance code for previous version
         outputs = []
@@ -497,14 +477,6 @@ class MultiInstanceTask(TaskSpec):
         if not isinstance(my_task.task_spec, SubWorkflowTask):
             my_task._sync_children(outputs, TaskState.FUTURE)
 
-            for child in my_task.children:
-                child.task_spec._update(child)
-
-        # If removed, add the element_var_data back onto this task, after
-        # updating the children.
-        if(element_var_data):
-            my_task.data[self.elementVar] = element_var_data
-
     def serialize(self, serializer):
 
         return serializer.serialize_multi_instance(self)
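The slot-selection logic in the reworked `_merge_element_variable` is easier to see in isolation. A minimal sketch with plain dicts standing in for task data and for SpiffWorkflow's `DeepMerge` (names here are illustrative, not the real API):

```python
def merge_element_variable(collect, runtimes, element_value, updating_same_collection):
    """Pick the slot to merge a completed iteration's result into."""
    if updating_same_collection:
        # cardinality came from the collection itself: reuse its nth key
        keys = list(collect.keys())
        if len(keys) < runtimes:
            raise ValueError('collection has fewer items than runtimes')
        slot = keys[runtimes - 1]
    else:
        # cardinality was a number or an array: use the iteration counter
        slot = runtimes
    merged = dict(collect.get(slot, {}))
    merged.update(element_value)        # stand-in for DeepMerge.merge
    collect[str(slot)] = merged
    return collect

print(merge_element_variable({'a': {'x': 1}}, 1, {'y': 2}, True))
# {'a': {'x': 1, 'y': 2}}
```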
@@ -66,8 +66,6 @@ class SubWorkflowTask(BpmnSpecMixin):
 
     def _on_complete_hook(self, my_task):
         BpmnSpecMixin._on_complete_hook(self, my_task)
-        for child in my_task.children:
-            child.task_spec._update(child)
 
     def _on_cancel(self, my_task):
         subworkflow = my_task.workflow.get_subprocess(my_task)
@@ -146,8 +146,6 @@ class UnstructuredJoin(Join, BpmnSpecMixin):
 
     def _update_hook(self, my_task):
 
-        if my_task._is_predicted():
-            self._predict(my_task)
         if not my_task.parent._is_finished():
             return
 
@@ -95,7 +95,8 @@ class DMNEngine:
         # If we get here, we need to check whether the match expression includes
         # an operator or if can use '=='
         needs_eq = self.needs_eq(script_engine, match_expr)
-        expr = input_expr + ' == ' + match_expr if needs_eq else input_expr + match_expr
+        # Disambiguate cases like a == 0 == True when we add '=='
+        expr = f'({input_expr}) == ({match_expr})' if needs_eq else input_expr + match_expr
         return script_engine.evaluate(task, expr)
 
     @staticmethod
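The added parentheses matter because Python chains comparisons: when the input expression is itself a comparison, naive concatenation changes its meaning. A standalone illustration (plain `eval`, not the engine's script evaluator):

```python
input_expr, match_expr = 'a == 0', 'True'
a = 0

# 'a == 0 == True' chains to (a == 0) and (0 == True) -> True and False -> False
print(eval(input_expr + ' == ' + match_expr))     # False (wrong)

# '(a == 0) == (True)' compares the boolean result instead
print(eval(f'({input_expr}) == ({match_expr})'))  # True (intended)
```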
@@ -88,8 +88,6 @@ class ExclusiveChoice(MultiChoice):
                 f'No conditions satisfied for {my_task.task_spec.name}')
 
         my_task._sync_children([output], TaskState.FUTURE)
-        for child in my_task.children:
-            child.task_spec._update(child)
 
     def serialize(self, serializer):
         return serializer.serialize_exclusive_choice(self)
@@ -134,8 +134,6 @@ class MultiChoice(TaskSpec):
             outputs.append(self._wf_spec.get_task_spec_from_name(output))
 
         my_task._sync_children(outputs, TaskState.FUTURE)
-        for child in my_task.children:
-            child.task_spec._update(child)
 
     def serialize(self, serializer):
         return serializer.serialize_multi_choice(self)
@@ -102,8 +102,6 @@ class MultiInstance(TaskSpec):
     def _on_complete_hook(self, my_task):
         outputs = self._get_predicted_outputs(my_task)
         my_task._sync_children(outputs, TaskState.FUTURE)
-        for child in my_task.children:
-            child.task_spec._update(child)
 
     def serialize(self, serializer):
         return serializer.serialize_multi_instance(self)
@@ -76,12 +76,14 @@ class SubWorkflow(TaskSpec):
                 self, 'File does not exist: %s' % self.file)
 
     def _predict_hook(self, my_task):
+        # Modifying the task spec is a TERRIBLE idea, but if we don't do it, sync_children won't work
         outputs = [task.task_spec for task in my_task.children]
         for output in self.outputs:
             if output not in outputs:
                 outputs.insert(0, output)
         if my_task._is_definite():
-            my_task._sync_children(outputs, TaskState.FUTURE)
+            # This prevents errors with sync children
+            my_task._sync_children(outputs, TaskState.LIKELY)
         else:
             my_task._sync_children(outputs, my_task.state)
 
@@ -107,10 +109,7 @@ class SubWorkflow(TaskSpec):
 
     def _integrate_subworkflow_tree(self, my_task, subworkflow):
        # Integrate the tree of the subworkflow into the tree of this workflow.
-        my_task._sync_children(self.outputs, TaskState.FUTURE)
-        for child in my_task.children:
-            child.task_spec._update(child)
-            child._inherit_data()
+        my_task._sync_children(self.outputs, TaskState.LIKELY)
         for child in subworkflow.task_tree.children:
             my_task.children.insert(0, child)
             child.parent = my_task
@@ -121,10 +120,18 @@ class SubWorkflow(TaskSpec):
         for child in subworkflow.task_tree.children:
             for assignment in self.in_assign:
                 assignment.assign(my_task, child)
 
-        self._predict(my_task)
-        for child in subworkflow.task_tree.children:
-            child.task_spec._update(child)
+        # Instead of completing immediately, we'll wait for the subworkflow to complete
+        my_task._set_state(TaskState.WAITING)
+
+    def _update_hook(self, my_task):
+        subworkflow = my_task._get_internal_data('subworkflow')
+        if subworkflow is None:
+            # On the first update, we have to create the subworkflow
+            super()._update_hook(my_task)
+        elif subworkflow.is_completed():
+            # Then wait until it finishes to complete
+            my_task.complete()
 
     def _on_subworkflow_completed(self, subworkflow, my_task):
         # Assign variables, if so requested.
@@ -138,11 +145,6 @@ class SubWorkflow(TaskSpec):
             # Alright, abusing that hook is just evil but it works.
             child.task_spec._update_hook(child)
 
-    def _on_complete_hook(self, my_task):
-        for child in my_task.children:
-            if isinstance(child.task_spec, StartTask):
-                child.task_spec._update(child)
-
     def serialize(self, serializer):
         return serializer.serialize_sub_workflow(self)
 
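The behavioral change above is that a `SubWorkflow` task now parks itself in WAITING instead of completing immediately, and `_update_hook` completes it once the child workflow finishes. A self-contained sketch of that lifecycle, using toy classes rather than the SpiffWorkflow API:

```python
class ToySubWorkflowTask:
    def __init__(self):
        self.state = 'READY'
        self.subworkflow = None          # internal-data slot for the child workflow

    def on_ready(self, spawn):
        self.subworkflow = spawn()       # first run: create the child workflow...
        self.state = 'WAITING'           # ...and wait instead of completing

    def update(self):
        # called whenever the engine re-examines waiting tasks
        if self.subworkflow is not None and self.subworkflow['completed']:
            self.state = 'COMPLETED'

task = ToySubWorkflowTask()
task.on_ready(lambda: {'completed': False})
task.update(); print(task.state)         # WAITING
task.subworkflow['completed'] = True
task.update(); print(task.state)         # COMPLETED
```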
@@ -133,8 +133,6 @@ class ThreadSplit(TaskSpec):
         for i in range(split_n):
             outputs.append(self.thread_starter)
         my_task._sync_children(outputs, TaskState.FUTURE)
-        for child in my_task.children:
-            child.task_spec._update(child)
 
     def serialize(self, serializer):
         return serializer.serialize_thread_split(self)
@@ -242,24 +242,19 @@ class TaskSpec(object):
         :type looked_ahead: integer
         :param looked_ahead: The depth of the predicted path so far.
         """
-        if my_task._is_finished():
-            return
         if seen is None:
             seen = []
         elif self in seen:
             return
+        if not my_task._is_finished():
+            self._predict_hook(my_task)
 
-        self._predict_hook(my_task)
         if not my_task._is_definite():
-            if looked_ahead + 1 >= self.lookahead:
-                return
             seen.append(self)
+        look_ahead = my_task._is_definite() or looked_ahead + 1 < self.lookahead
         for child in my_task.children:
-            child.task_spec._predict(child, seen[:], looked_ahead + 1)
+            if not child._is_finished() and child not in seen and look_ahead:
+                child.task_spec._predict(child, seen[:], looked_ahead + 1)
 
     def _predict_hook(self, my_task):
-        # If the task's status is not predicted, we default to FUTURE
-        # for all it's outputs.
+        # If the task's status is not predicted, we default to FUTURE for all it's outputs.
         # Otherwise, copy my own state to the children.
         if my_task._is_definite():
             best_state = TaskState.FUTURE
@@ -278,6 +273,12 @@ class TaskSpec(object):
         completes it makes sure to call this method so we can react.
         """
         my_task._inherit_data()
+        # We were doing this in _update_hook, but to me that seems inconsistent with the spirit
+        # of the hook functions. Moving it here allows removal of some repeated calls (overridden
+        # hook methods still need to do these things)
+        if my_task._is_predicted():
+            self._predict(my_task)
+        self.entered_event.emit(my_task.workflow, my_task)
         self._update_hook(my_task)
 
     def _update_hook(self, my_task):
@@ -290,11 +291,8 @@ class TaskSpec(object):
         Returning non-False will cause the task to go into READY.
         Returning any other value will cause no action.
         """
-        if my_task._is_predicted():
-            self._predict(my_task)
         if not my_task.parent._is_finished():
             return
-        self.entered_event.emit(my_task.workflow, my_task)
+        # If this actually did what the documentation said (returned a value indicating
+        # that the task was ready), then a lot of things might be easier.
         my_task._ready()
 
     def _on_ready(self, my_task):
@@ -387,21 +385,14 @@ class TaskSpec(object):
         """
         assert my_task is not None
 
-        if my_task.workflow.debug:
-            print("Executing %s: %s (%s)" % (
-                my_task.task_spec.__class__.__name__,
-                my_task.get_name(), my_task.get_description()))
-
         # We have to set the last task here, because the on_complete_hook
         # of a loopback task may overwrite what the last_task will be.
         my_task.workflow.last_task = my_task
         self._on_complete_hook(my_task)
+        for child in my_task.children:
+            child.task_spec._update(child)
         my_task.workflow._task_completed_notify(my_task)
 
-        if my_task.workflow.debug:
-            if hasattr(my_task.workflow, "outer_workflow"):
-                my_task.workflow.outer_workflow.task_tree.dump()
-
         self.completed_event.emit(my_task.workflow, my_task)
         return True
@@ -414,9 +405,7 @@ class TaskSpec(object):
         :rtype: bool
         :returns: True on success, False otherwise.
         """
-        # If we have more than one output, implicitly split.
-        for child in my_task.children:
-            child.task_spec._update(child)
+        pass
 
     @abstractmethod
     def serialize(self, serializer, **kwargs):
@@ -478,8 +467,6 @@ class TaskSpec(object):
         :rtype: TaskSpec
         :returns: The task specification instance.
         """
-        print(s_state)
-        print(wf_spec)
         out = cls(wf_spec, s_state.get('name'))
         out.id = s_state.get('id')
         out.name = s_state.get('name')
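The reworked `_predict` above recurses only into unfinished children, keeps a `seen` list to stop on cycles, and stops expanding non-definite branches past the lookahead depth. A toy version of the guard logic (stand-in classes, not the real Task API):

```python
class Node:
    def __init__(self, name, definite=False, children=()):
        self.name, self.definite, self.children = name, definite, list(children)

def predict(node, seen=None, looked_ahead=0, lookahead=2):
    seen = [] if seen is None else seen
    if node in seen:
        return
    if not node.definite:
        seen.append(node)
    # definite branches are always expanded; speculative ones only up to the lookahead
    look_ahead = node.definite or looked_ahead + 1 < lookahead
    for child in node.children:
        if child not in seen and look_ahead:
            print('predicting', child.name)
            predict(child, seen[:], looked_ahead + 1, lookahead)

loop = Node('loop')
loop.children.append(loop)   # a cycle: the seen list prevents infinite recursion
predict(Node('start', definite=True, children=[loop]))
```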
@@ -23,7 +23,7 @@ class SpiffTaskParser(TaskParser):
         extensions = {}
         extra_ns = {SPIFFWORKFLOW_MODEL_PREFIX: SPIFFWORKFLOW_MODEL_NS}
         xpath = xpath_eval(node, extra_ns)
-        extension_nodes = xpath(f'.//bpmn:extensionElements/{SPIFFWORKFLOW_MODEL_PREFIX}:*')
+        extension_nodes = xpath(f'./bpmn:extensionElements/{SPIFFWORKFLOW_MODEL_PREFIX}:*')
         for node in extension_nodes:
             name = etree.QName(node).localname
             if name == 'properties':
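The one-character change above (`.//` to `./`) narrows the XPath search from any descendant to direct children of the current node, so extensions on a nested element are no longer picked up by its parent. A quick lxml demonstration with toy XML (not real BPMN):

```python
from lxml import etree

doc = etree.fromstring(
    '<task xmlns:s="http://spiff">'
    '<extensionElements><s:own/></extensionElements>'
    '<child><extensionElements><s:nested/></extensionElements></child>'
    '</task>')
ns = {'s': 'http://spiff'}

descendants = doc.xpath('.//extensionElements/s:*', namespaces=ns)
children = doc.xpath('./extensionElements/s:*', namespaces=ns)
print([etree.QName(e).localname for e in descendants])  # ['own', 'nested']
print([etree.QName(e).localname for e in children])     # ['own']
```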
@@ -39,6 +39,6 @@ class SpiffBpmnTask(BpmnSpecMixin):
         self.execute_script(my_task, self.prescript)
 
     def _on_complete_hook(self, my_task):
-        super()._on_complete_hook(my_task)
         if self.postscript is not None:
             self.execute_script(my_task, self.postscript)
+        super()._on_complete_hook(my_task)
@@ -522,46 +522,42 @@ class Task(object, metaclass=DeprecatedMetaTask):
         """
         if task_specs is None:
             raise ValueError('"task_specs" argument is None')
-        add = task_specs[:]
+        new_children = task_specs[:]
 
         # If a child task_spec is also an ancestor, we are looping back,
         # replace those specs with a loopReset task.
         root_task = self._get_root()
-        for index, task_spec in enumerate(add):
+        for index, task_spec in enumerate(new_children):
             ancestor_task = self._find_ancestor(task_spec)
             if ancestor_task and ancestor_task != root_task:
                 destination = ancestor_task
                 new_spec = self.workflow.get_reset_task_spec(destination)
                 new_spec.outputs = []
                 new_spec.inputs = task_spec.inputs
-                add[index] = new_spec
+                new_children[index] = new_spec
 
         # Create a list of all children that are no longer needed.
-        remove = []
+        unneeded_children = []
         for child in self.children:
             # Triggered tasks are never removed.
             if child.triggered:
                 continue
 
-            # Check whether the task needs to be removed.
-            if child.task_spec in add:
-                add.remove(child.task_spec)
+            # If the task already exists, remove it from to-be-added
+            if child.task_spec in new_children:
+                new_children.remove(child.task_spec)
+                # We should set the state here but that breaks everything
                 continue
 
-            # Non-predicted tasks must not be removed, so they HAVE to be in
-            # the given task spec list.
+            # Definite tasks must not be removed, so they HAVE to be in the given task spec list.
             if child._is_definite():
-                raise WorkflowException(self.task_spec,
-                                        'removal of non-predicted child %s' %
-                                        repr(child))
-            remove.append(child)
+                raise WorkflowException(self.task_spec, f'removal of non-predicted child {child}')
+            unneeded_children.append(child)
 
         # Remove and add the children accordingly.
-        for child in remove:
+        for child in unneeded_children:
             self.children.remove(child)
-        for task_spec in add:
+        for task_spec in new_children:
             self._add_child(task_spec, state)
 
     def _set_likely_task(self, task_specs):
@@ -574,7 +570,6 @@ class Task(object, metaclass=DeprecatedMetaTask):
             if child._is_definite():
                 continue
             child._set_state(TaskState.LIKELY)
-            return
 
     def _is_descendant_of(self, parent):
         """
@@ -19,11 +19,15 @@ class BpmnWorkflowTestCase(unittest.TestCase):
 
     serializer = BpmnWorkflowSerializer(wf_spec_converter)
 
-    def load_workflow_spec(self, filename, process_name, validate=True):
+    def get_parser(self, filename, validate=True):
         f = os.path.join(os.path.dirname(__file__), 'data', filename)
         validator = BpmnValidator() if validate else None
         parser = TestBpmnParser(validator=validator)
         parser.add_bpmn_files_by_glob(f)
+        return parser
+
+    def load_workflow_spec(self, filename, process_name, validate=True):
+        parser = self.get_parser(filename, validate)
         top_level_spec = parser.get_spec(process_name)
         subprocesses = parser.get_subprocess_specs(process_name)
         return top_level_spec, subprocesses
@@ -6,10 +6,22 @@ from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
 
 class CollaborationTest(BpmnWorkflowTestCase):
 
+    def testParserProvidesInfoOnMessagesAndCorrelations(self):
+        parser = self.get_parser('collaboration.bpmn')
+        self.assertEqual(list(parser.messages.keys()), ['love_letter', 'love_letter_response'])
+        self.assertEqual(parser.correlations,
+                         {'lover_name': {'name': "Lover's Name",
+                                         'retrieval_expressions': [
+                                             {'expression': 'lover_name',
+                                              'messageRef': 'love_letter'},
+                                             {'expression': 'from_name',
+                                              'messageRef': 'love_letter_response'}]}}
+                         )
+
     def testCollaboration(self):
 
         spec, subprocesses = self.load_collaboration('collaboration.bpmn', 'my_collaboration')
 
         # Only executable processes should be started
         self.assertIn('process_buddy', subprocesses)
         self.assertNotIn('random_person_process', subprocesses)
@@ -122,4 +134,4 @@ class CollaborationTest(BpmnWorkflowTestCase):
         start = self.workflow.get_tasks_from_spec_name('Start')[0]
         start.data['lover_name'] = 'Peggy'
         self.workflow.do_engine_steps()
         self.save_restore()
@@ -19,6 +19,12 @@ class SwimLaneTest(BpmnWorkflowTestCase):
         spec, subprocesses = self.load_workflow_spec('lanes.bpmn', 'lanes')
         self.workflow = BpmnWorkflow(spec, subprocesses)
 
+    def testBpmnParserKnowsLanesExist(self):
+        parser = self.get_parser('lanes.bpmn')
+        self.assertTrue(parser.get_process_parser('lanes').has_lanes())
+        parser = self.get_parser('random_fact.bpmn')
+        self.assertFalse(parser.get_process_parser('random_fact').has_lanes())
+
     def testRunThroughHappy(self):
 
         self.workflow.do_engine_steps()
@@ -23,13 +23,17 @@ class BaseTestCase(BpmnWorkflowTestCase):
 
     serializer = BpmnWorkflowSerializer(wf_spec_converter)
 
-    def load_workflow_spec(self, filename, process_name, dmn_filename=None):
-        bpmn = os.path.join(os.path.dirname(__file__), 'data', filename)
+    def get_parser(self, filename, dmn_filename=None):
+        f = os.path.join(os.path.dirname(__file__), 'data', filename)
         parser = CamundaParser()
-        parser.add_bpmn_files_by_glob(bpmn)
+        parser.add_bpmn_files_by_glob(f)
         if dmn_filename is not None:
             dmn = os.path.join(os.path.dirname(__file__), 'data', 'dmn', dmn_filename)
             parser.add_dmn_files_by_glob(dmn)
+        return parser
+
+    def load_workflow_spec(self, filename, process_name, dmn_filename=None):
+        parser = self.get_parser(filename, dmn_filename)
         top_level_spec = parser.get_spec(process_name)
         subprocesses = parser.get_subprocess_specs(process_name)
         return top_level_spec, subprocesses
@@ -70,11 +70,9 @@ class MultiInstanceParallelArrayTest(BaseTestCase):
                     {"CurrentFamilyMember": {"Birthdate": "10/05/1985" + str(x)}})
                 self.workflow.do_engine_steps()
                 self.workflow.complete_task_from_id(task.id)
-                # The data should still be available on the current task.
-                self.assertEqual({'FirstName': "The Funk #%i" % x,
-                                  'Birthdate': '10/05/1985' + str(x)},
-                                 self.workflow.get_task(task.id)
-                                 .data['CurrentFamilyMember'])
+                # We used to check that the current data variable was available in the task,
+                # but there's no reason to preserve it after the task completes. We removed it
+                # in some cases and left it in others, which just adds to the confusion.
                 self.workflow.do_engine_steps()
                 if save_restore:
                     self.reload_save_restore()
@@ -44,8 +44,7 @@ class ResetTokenTestMIParallel(BaseTestCase):
         self.workflow.do_engine_steps()
         if save_restore: self.save_restore()
 
-        self.assertEqual({'current': {'A': 'y'},
-                          'do_step': 'Yes',
+        self.assertEqual({'do_step': 'Yes',
                           'output': {'1': {'A': 'x'}, '2': {'A': 'y'}, '3': {'A': 'z'}}},
                          self.workflow.last_task.data)
 
@@ -66,8 +65,7 @@ class ResetTokenTestMIParallel(BaseTestCase):
 
         self.assertTrue(self.workflow.is_completed())
 
-        self.assertEqual({'current': {'A': 'x'},
-                          'do_step': 'Yes',
+        self.assertEqual({'do_step': 'Yes',
                           'C': 'c',
                           'output': {'1': {'A': 'a1'},
                                      '2': {'A': 'y'},
@@ -75,11 +73,6 @@ class ResetTokenTestMIParallel(BaseTestCase):
                          self.workflow.last_task.data)
 
-
-
-
-
-
 def suite():
     return unittest.TestLoader().loadTestsFromTestCase(ResetTokenTestMIParallel)
 
@@ -14,6 +14,16 @@ class StartMessageTest(BaseTestCase):
         self.spec, self.subprocesses = self.load_workflow_spec('message_test.bpmn', 'ThrowCatch')
         self.workflow = BpmnWorkflow(self.spec, self.subprocesses)
 
+    def testParserCanReturnStartMessages(self):
+        parser = self.get_parser('message_test.bpmn')
+        self.assertEqual(
+            parser.process_parsers['ThrowCatch'].start_messages(), ['Message_1rkbi27'])
+
+        parser = self.get_parser('random_fact.bpmn')
+        self.assertEqual(
+            parser.process_parsers['random_fact'].start_messages(), [])
+
     def testRunThroughHappy(self):
         self.actual_test(save_restore=False)
@@ -1,11 +1,11 @@
 Start
 first
 excl_choice_1
-sub_workflow_1
 Start
 first
 excl_choice_1
 last
 End
+sub_workflow_1
 last
 End
@@ -1,10 +1,10 @@
 Start
 first
-sub_workflow_1
 Start
 first
 last
 End
+sub_workflow_1
 second
 join
 last
@@ -38,6 +38,8 @@ class TaskSpecTest(unittest.TestCase):
     def do_next_unique_task(self, name):
         # This method asserts that there is only one ready task! The specified
         # one - and then completes it
+        for task in self.workflow.get_tasks(TaskState.WAITING):
+            task.task_spec._update(task)
         ready_tasks = self.workflow.get_tasks(TaskState.READY)
         self.assertEqual(1, len(ready_tasks))
         task = ready_tasks[0]
@@ -58,12 +60,13 @@ class TaskSpecTest(unittest.TestCase):
         self.load_workflow_spec('data', 'block_to_subworkflow.xml')
         self.do_next_unique_task('Start')
         self.do_next_unique_task('first')
         self.do_next_unique_task('sub_workflow_1')
-        # Inner:
+
+        # Inner. The subworkflow task will complete automatically after the subworkflow completes
         self.do_next_unique_task('Start')
         self.do_next_unique_task('first')
         self.do_next_unique_task('last')
         self.do_next_unique_task('End')
 
         # Back to outer:
         self.do_next_unique_task('last')
         self.do_next_unique_task('End')
@@ -72,7 +75,7 @@ class TaskSpecTest(unittest.TestCase):
         self.load_workflow_spec('data', 'subworkflow_to_block.xml')
         self.do_next_unique_task('Start')
         self.do_next_unique_task('first')
-        self.do_next_unique_task('sub_workflow_1')
+
         # Inner:
         self.do_next_unique_task('Start')
         self.do_next_unique_task('first')
@@ -86,8 +89,9 @@ class TaskSpecTest(unittest.TestCase):
         self.load_workflow_spec('control-flow', 'subworkflow_to_join.xml')
         self.do_next_unique_task('Start')
         self.do_next_unique_task('first')
-        self.do_next_named_step('second', ['sub_workflow_1'])
         self.do_next_unique_task('sub_workflow_1')
+        # The subworkflow task now sets its child tasks to READY and waits
+        self.do_next_named_step('second', ['Start'])
 
         # Inner:
         self.do_next_unique_task('Start')
         self.do_next_unique_task('first')
@@ -102,8 +106,8 @@ class TaskSpecTest(unittest.TestCase):
         self.load_workflow_spec('control-flow', 'subworkflow_to_join.xml')
         self.do_next_unique_task('Start')
         self.do_next_unique_task('first')
-        self.do_next_named_step('second', ['sub_workflow_1'])
         self.do_next_unique_task('sub_workflow_1')
+        self.do_next_named_step('second', ['Start'])
 
         # Inner:
         self.do_next_unique_task('Start')
         self.do_next_unique_task('first')
@@ -1,7 +1,5 @@
 # -*- coding: utf-8 -*-
 
-from builtins import str
-from builtins import range
 import time
 from SpiffWorkflow.task import Task, TaskState
 from SpiffWorkflow.workflow import Workflow
@@ -33,7 +31,6 @@ def on_reached_cb(workflow, task, taken_path):
     props = []
     for key, value in list(task.task_spec.data.items()):
         props.append('='.join((key, str(value))))
-    # print "REACHED:", task.get_name(), atts, props
 
     # Store the list of data in the workflow.
     atts = ';'.join(atts)
@@ -0,0 +1,132 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# Intellij
.idea/

@@ -0,0 +1,504 @@
[New file: the complete text of the GNU LESSER GENERAL PUBLIC LICENSE, Version 2.1, February 1999, Copyright (C) 1991, 1999 Free Software Foundation, Inc. Standard license text, added verbatim; omitted here for brevity.]
|
||||
that the user who changes the contents of definitions files in the
|
||||
Library will not necessarily be able to recompile the application
|
||||
to use the modified definitions.)
|
||||
|
||||
b) Use a suitable shared library mechanism for linking with the
|
||||
Library. A suitable mechanism is one that (1) uses at run time a
|
||||
copy of the library already present on the user's computer system,
|
||||
rather than copying library functions into the executable, and (2)
|
||||
will operate properly with a modified version of the library, if
|
||||
the user installs one, as long as the modified version is
|
||||
interface-compatible with the version that the work was made with.
|
||||
|
||||
c) Accompany the work with a written offer, valid for at
|
||||
least three years, to give the same user the materials
|
||||
specified in Subsection 6a, above, for a charge no more
|
||||
than the cost of performing this distribution.
|
||||
|
||||
d) If distribution of the work is made by offering access to copy
|
||||
from a designated place, offer equivalent access to copy the above
|
||||
specified materials from the same place.
|
||||
|
||||
e) Verify that the user has already received a copy of these
|
||||
materials or that you have already sent this user a copy.
|
||||
|
||||
For an executable, the required form of the "work that uses the
|
||||
Library" must include any data and utility programs needed for
|
||||
reproducing the executable from it. However, as a special exception,
|
||||
the materials to be distributed need not include anything that is
|
||||
normally distributed (in either source or binary form) with the major
|
||||
components (compiler, kernel, and so on) of the operating system on
|
||||
which the executable runs, unless that component itself accompanies
|
||||
the executable.
|
||||
|
||||
It may happen that this requirement contradicts the license
|
||||
restrictions of other proprietary libraries that do not normally
|
||||
accompany the operating system. Such a contradiction means you cannot
|
||||
use both them and the Library together in an executable that you
|
||||
distribute.
|
||||
|
||||
7. You may place library facilities that are a work based on the
|
||||
Library side-by-side in a single library together with other library
|
||||
facilities not covered by this License, and distribute such a combined
|
||||
library, provided that the separate distribution of the work based on
|
||||
the Library and of the other library facilities is otherwise
|
||||
permitted, and provided that you do these two things:
|
||||
|
||||
a) Accompany the combined library with a copy of the same work
|
||||
based on the Library, uncombined with any other library
|
||||
facilities. This must be distributed under the terms of the
|
||||
Sections above.
|
||||
|
||||
b) Give prominent notice with the combined library of the fact
|
||||
that part of it is a work based on the Library, and explaining
|
||||
where to find the accompanying uncombined form of the same work.
|
||||
|
||||
8. You may not copy, modify, sublicense, link with, or distribute
|
||||
the Library except as expressly provided under this License. Any
|
||||
attempt otherwise to copy, modify, sublicense, link with, or
|
||||
distribute the Library is void, and will automatically terminate your
|
||||
rights under this License. However, parties who have received copies,
|
||||
or rights, from you under this License will not have their licenses
|
||||
terminated so long as such parties remain in full compliance.
|
||||
|
||||
9. You are not required to accept this License, since you have not
|
||||
signed it. However, nothing else grants you permission to modify or
|
||||
distribute the Library or its derivative works. These actions are
|
||||
prohibited by law if you do not accept this License. Therefore, by
|
||||
modifying or distributing the Library (or any work based on the
|
||||
Library), you indicate your acceptance of this License to do so, and
|
||||
all its terms and conditions for copying, distributing or modifying
|
||||
the Library or works based on it.
|
||||
|
||||
10. Each time you redistribute the Library (or any work based on the
|
||||
Library), the recipient automatically receives a license from the
|
||||
original licensor to copy, distribute, link with or modify the Library
|
||||
subject to these terms and conditions. You may not impose any further
|
||||
restrictions on the recipients' exercise of the rights granted herein.
|
||||
You are not responsible for enforcing compliance by third parties with
|
||||
this License.
|
||||
|
||||
11. If, as a consequence of a court judgment or allegation of patent
|
||||
infringement or for any other reason (not limited to patent issues),
|
||||
conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot
|
||||
distribute so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you
|
||||
may not distribute the Library at all. For example, if a patent
|
||||
license would not permit royalty-free redistribution of the Library by
|
||||
all those who receive copies directly or indirectly through you, then
|
||||
the only way you could satisfy both it and this License would be to
|
||||
refrain entirely from distribution of the Library.
|
||||
|
||||
If any portion of this section is held invalid or unenforceable under any
|
||||
particular circumstance, the balance of the section is intended to apply,
|
||||
and the section as a whole is intended to apply in other circumstances.
|
||||
|
||||
It is not the purpose of this section to induce you to infringe any
|
||||
patents or other property right claims or to contest validity of any
|
||||
such claims; this section has the sole purpose of protecting the
|
||||
integrity of the free software distribution system which is
|
||||
implemented by public license practices. Many people have made
|
||||
generous contributions to the wide range of software distributed
|
||||
through that system in reliance on consistent application of that
|
||||
system; it is up to the author/donor to decide if he or she is willing
|
||||
to distribute software through any other system and a licensee cannot
|
||||
impose that choice.
|
||||
|
||||
This section is intended to make thoroughly clear what is believed to
|
||||
be a consequence of the rest of this License.
|
||||
|
||||
12. If the distribution and/or use of the Library is restricted in
|
||||
certain countries either by patents or by copyrighted interfaces, the
|
||||
original copyright holder who places the Library under this License may add
|
||||
an explicit geographical distribution limitation excluding those countries,
|
||||
so that distribution is permitted only in or among countries not thus
|
||||
excluded. In such case, this License incorporates the limitation as if
|
||||
written in the body of this License.
|
||||
|
||||
13. The Free Software Foundation may publish revised and/or new
|
||||
versions of the Lesser General Public License from time to time.
|
||||
Such new versions will be similar in spirit to the present version,
|
||||
but may differ in detail to address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the Library
|
||||
specifies a version number of this License which applies to it and
|
||||
"any later version", you have the option of following the terms and
|
||||
conditions either of that version or of any later version published by
|
||||
the Free Software Foundation. If the Library does not specify a
|
||||
license version number, you may choose any version ever published by
|
||||
the Free Software Foundation.
|
||||
|
||||
14. If you wish to incorporate parts of the Library into other free
|
||||
programs whose distribution conditions are incompatible with these,
|
||||
write to the author to ask for permission. For software which is
|
||||
copyrighted by the Free Software Foundation, write to the Free
|
||||
Software Foundation; we sometimes make exceptions for this. Our
|
||||
decision will be guided by the two goals of preserving the free status
|
||||
of all derivatives of our free software and of promoting the sharing
|
||||
and reuse of software generally.
|
||||
|
||||
NO WARRANTY
|
||||
|
||||
15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
|
||||
WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
|
||||
EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
|
||||
OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY
|
||||
KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
|
||||
LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
|
||||
THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||
|
||||
16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
|
||||
WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
|
||||
AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
|
||||
FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
|
||||
CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
|
||||
LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
|
||||
RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
|
||||
FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
|
||||
SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
|
||||
DAMAGES.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Libraries
|
||||
|
||||
If you develop a new library, and you want it to be of the greatest
|
||||
possible use to the public, we recommend making it free software that
|
||||
everyone can redistribute and change. You can do so by permitting
|
||||
redistribution under these terms (or, alternatively, under the terms of the
|
||||
ordinary General Public License).
|
||||
|
||||
To apply these terms, attach the following notices to the library. It is
|
||||
safest to attach them to the start of each source file to most effectively
|
||||
convey the exclusion of warranty; and each file should have at least the
|
||||
"copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
<one line to give the library's name and a brief idea of what it does.>
|
||||
Copyright (C) <year> <name of author>
|
||||
|
||||
This library is free software; you can redistribute it and/or
|
||||
modify it under the terms of the GNU Lesser General Public
|
||||
License as published by the Free Software Foundation; either
|
||||
version 2.1 of the License, or (at your option) any later version.
|
||||
|
||||
This library is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
Lesser General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU Lesser General Public
|
||||
License along with this library; if not, write to the Free Software
|
||||
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
|
||||
USA
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
You should also get your employer (if you work as a programmer) or your
|
||||
school, if any, to sign a "copyright disclaimer" for the library, if
|
||||
necessary. Here is a sample; alter the names:
|
||||
|
||||
Yoyodyne, Inc., hereby disclaims all copyright interest in the
|
||||
library `Frob' (a library for tweaking knobs) written by James Random
|
||||
Hacker.
|
||||
|
||||
<signature of Ty Coon>, 1 April 1990
|
||||
Ty Coon, President of Vice
|
||||
|
||||
That's all there is to it!

@ -0,0 +1,37 @@
# connector-proxy-demo
A Spiff-Connector for demonstration purposes - shows how to build connectors to some common 3rd party systems.

# How to create a Connector Proxy for SpiffWorkflow

## Step 1. Create a Python project with a few dependencies:
Create a bare-bones Flask application that depends on the core spiffworkflow-proxy (a Flask blueprint)
and any connector dependencies you wish to use.
We will hopefully be adding a number of available connectors in the future. Please check out the connector-aws repository for an example of how to create connections to new services.
These entries belong in the `[tool.poetry.dependencies]` section of your `pyproject.toml` (the complete file appears later in this commit):
```toml
python = "^3.11"
Flask = "^2.2.2"
spiffworkflow-proxy = {git = "https://github.com/sartography/spiffworkflow-proxy"}
connector-aws = { git = "https://github.com/sartography/connector-aws.git"}
```

## Step 2.
Create a basic Flask application that uses the SpiffWorkflow Proxy's Flask Blueprint:
```python
from spiffworkflow_proxy.blueprint import proxy_blueprint
from flask import Flask

app = Flask(__name__)
app.config.from_pyfile("config.py", silent=True)
app.register_blueprint(proxy_blueprint)
if __name__ == "__main__":
    app.run(host="localhost", port=5000)
```

## Step 3.
Fire it up:
```bash
flask run
```

Any dependencies you add will now be available for SpiffWorkflow to call using a Service Task. What's more, those services are now discoverable: when someone drops a Service Task into their diagram, they will get a dropdown list of all the services you have made available to them. Each service also declares which parameters it requires, so diagram authors can be prompted for the information necessary to make the call. That can range from no parameters at all (just fetch a fact about Chuck Norris) to arbitrarily complex ones, such as a JSON structure to be added to a DynamoDB table.
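
Once the proxy is running, you can sanity-check discovery directly over HTTP. A minimal sketch in Python follows; the `/v1/commands` listing route is an assumption about the blueprint's URL layout rather than a documented contract, so check which routes your version of spiffworkflow-proxy actually registers (for example with `flask routes`) and adjust accordingly:
```python
import requests

# NOTE: this listing path is assumed, not taken from the spiffworkflow-proxy
# docs -- verify the real route with `flask routes` before relying on it.
PROXY_LISTING_URL = "http://localhost:5000/v1/commands"

response = requests.get(PROXY_LISTING_URL)
response.raise_for_status()

# Each entry should describe one discoverable service and the
# parameters a Service Task must supply in order to call it.
for command in response.json():
    print(command)
```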

@ -0,0 +1,18 @@
import os

from spiffworkflow_proxy.blueprint import proxy_blueprint
from flask import Flask

app = Flask(__name__)
app.config.from_pyfile("config.py", silent=True)

# Outside of production, allow OAuth to run over plain HTTP so the proxy
# can be exercised locally without TLS.
if app.config["ENV"] != "production":
    os.environ["OAUTHLIB_INSECURE_TRANSPORT"] = "1"

# Use the SpiffConnector Blueprint, which will auto-discover any
# connector-* packages and provide API endpoints for listing and executing
# available services.
app.register_blueprint(proxy_blueprint)

if __name__ == "__main__":
    app.run(host="localhost", port=5000)
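
Because `config.py` is loaded with `silent=True`, that file is optional. If you want the `ENV` check above to see something other than Flask's default, a minimal sketch of such a file might look like this (the values are illustrative assumptions, not part of this commit):
```python
# config.py -- optional settings picked up by app.config.from_pyfile().
# Flask only imports the UPPERCASE names defined in this file.
ENV = "development"  # any value other than "production" permits OAuth over plain HTTP locally
```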

@ -0,0 +1,506 @@
[[package]]
name = "boto3"
version = "1.26.6"
description = "The AWS SDK for Python"
category = "main"
optional = false
python-versions = ">= 3.7"

[package.dependencies]
botocore = ">=1.29.6,<1.30.0"
jmespath = ">=0.7.1,<2.0.0"
s3transfer = ">=0.6.0,<0.7.0"

[package.extras]
crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]

[[package]]
name = "botocore"
version = "1.29.6"
description = "Low-level, data-driven core of boto 3."
category = "main"
optional = false
python-versions = ">= 3.7"

[package.dependencies]
jmespath = ">=0.7.1,<2.0.0"
python-dateutil = ">=2.1,<3.0.0"
urllib3 = ">=1.25.4,<1.27"

[package.extras]
crt = ["awscrt (==0.14.0)"]

[[package]]
name = "cachelib"
version = "0.9.0"
description = "A collection of cache libraries in the same API interface."
category = "main"
optional = false
python-versions = ">=3.7"

[[package]]
name = "certifi"
version = "2022.9.24"
description = "Python package for providing Mozilla's CA Bundle."
category = "main"
optional = false
python-versions = ">=3.6"

[[package]]
name = "charset-normalizer"
version = "2.1.1"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
category = "main"
optional = false
python-versions = ">=3.6.0"

[package.extras]
unicode_backport = ["unicodedata2"]

[[package]]
name = "click"
version = "8.1.3"
description = "Composable command line interface toolkit"
category = "main"
optional = false
python-versions = ">=3.7"

[package.dependencies]
colorama = {version = "*", markers = "platform_system == \"Windows\""}

[[package]]
name = "colorama"
version = "0.4.6"
description = "Cross-platform colored terminal text."
category = "main"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"

[[package]]
name = "connector-aws"
version = "0.1.0"
description = "Connect Service Tasks to AWS web services."
category = "main"
optional = false
python-versions = "^3.10"
develop = false

[package.dependencies]
boto3 = "^1.26.5"
simplejson = "^3.17.6"

[package.source]
type = "git"
url = "https://github.com/sartography/connector-aws.git"
reference = "HEAD"
resolved_reference = "ad386286bcc72eeb000b9b053596dfee40f7c6b5"

[[package]]
name = "Flask"
version = "2.2.2"
description = "A simple framework for building complex web applications."
category = "main"
optional = false
python-versions = ">=3.7"

[package.dependencies]
click = ">=8.0"
itsdangerous = ">=2.0"
Jinja2 = ">=3.0"
Werkzeug = ">=2.2.2"

[package.extras]
async = ["asgiref (>=3.2)"]
dotenv = ["python-dotenv"]

[[package]]
name = "Flask-OAuthlib"
version = "0.9.6"
description = "OAuthlib for Flask"
category = "main"
optional = false
python-versions = "*"

[package.dependencies]
cachelib = "*"
Flask = "*"
oauthlib = ">=1.1.2,<2.0.3 || >2.0.3,<2.0.4 || >2.0.4,<2.0.5 || >2.0.5,<3.0.0"
requests-oauthlib = ">=0.6.2,<1.2.0"

[[package]]
name = "idna"
version = "3.4"
description = "Internationalized Domain Names in Applications (IDNA)"
category = "main"
optional = false
python-versions = ">=3.5"

[[package]]
name = "itsdangerous"
version = "2.1.2"
description = "Safely pass data to untrusted environments and back."
category = "main"
optional = false
python-versions = ">=3.7"

[[package]]
name = "Jinja2"
version = "3.1.2"
description = "A very fast and expressive template engine."
category = "main"
optional = false
python-versions = ">=3.7"

[package.dependencies]
MarkupSafe = ">=2.0"

[package.extras]
i18n = ["Babel (>=2.7)"]

[[package]]
name = "jmespath"
version = "1.0.1"
description = "JSON Matching Expressions"
category = "main"
optional = false
python-versions = ">=3.7"

[[package]]
name = "MarkupSafe"
version = "2.1.1"
description = "Safely add untrusted strings to HTML/XML markup."
category = "main"
optional = false
python-versions = ">=3.7"

[[package]]
name = "oauthlib"
version = "2.1.0"
description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic"
category = "main"
optional = false
python-versions = "*"

[package.extras]
rsa = ["cryptography"]
signals = ["blinker"]
signedtoken = ["cryptography", "pyjwt (>=1.0.0)"]
test = ["blinker", "cryptography", "mock", "nose", "pyjwt (>=1.0.0)", "unittest2"]

[[package]]
name = "python-dateutil"
version = "2.8.2"
description = "Extensions to the standard Python datetime module"
category = "main"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"

[package.dependencies]
six = ">=1.5"

[[package]]
name = "requests"
version = "2.28.1"
description = "Python HTTP for Humans."
category = "main"
optional = false
python-versions = ">=3.7, <4"

[package.dependencies]
certifi = ">=2017.4.17"
charset-normalizer = ">=2,<3"
idna = ">=2.5,<4"
urllib3 = ">=1.21.1,<1.27"

[package.extras]
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"]

[[package]]
name = "requests-oauthlib"
version = "1.1.0"
description = "OAuthlib authentication support for Requests."
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"

[package.dependencies]
oauthlib = ">=2.1.0,<3.0.0"
requests = ">=2.0.0"

[package.extras]
rsa = ["oauthlib[signedtoken] (>=2.1.0,<3.0.0)"]

[[package]]
name = "s3transfer"
version = "0.6.0"
description = "An Amazon S3 Transfer Manager"
category = "main"
optional = false
python-versions = ">= 3.7"

[package.dependencies]
botocore = ">=1.12.36,<2.0a.0"

[package.extras]
crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"]

[[package]]
name = "simplejson"
version = "3.17.6"
description = "Simple, fast, extensible JSON encoder/decoder for Python"
category = "main"
optional = false
python-versions = ">=2.5, !=3.0.*, !=3.1.*, !=3.2.*"

[[package]]
name = "six"
version = "1.16.0"
description = "Python 2 and 3 compatibility utilities"
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"

[[package]]
name = "spiffworkflow-proxy"
version = "0.1.0"
description = "A blueprint that can allow (and limit) SpiffWorkflow's Service Tasks access to an organizations API's, such as connections to AWS Services and existing applications."
category = "main"
optional = false
python-versions = "^3.10"
develop = false

[package.dependencies]
Flask = "^2.2"
Flask-OAuthlib = "^0.9.6"

[package.source]
type = "git"
url = "https://github.com/sartography/spiffworkflow-proxy"
reference = "HEAD"
resolved_reference = "5e4926030cf6f2808ddb8e65527168dd914e5fc3"

[[package]]
name = "urllib3"
version = "1.26.12"
description = "HTTP library with thread-safe connection pooling, file post, and more."
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4"

[package.extras]
brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]

[[package]]
name = "Werkzeug"
version = "2.2.2"
description = "The comprehensive WSGI web application library."
category = "main"
optional = false
python-versions = ">=3.7"

[package.dependencies]
MarkupSafe = ">=2.1.1"

[package.extras]
watchdog = ["watchdog"]

[metadata]
lock-version = "1.1"
python-versions = "^3.10"
content-hash = "86cf682d49dc495c8cf6dc60a8aedc31ad32a293e6ceaf7b1428e0c232f8319e"

[metadata.files]
boto3 = [
    {file = "boto3-1.26.6-py3-none-any.whl", hash = "sha256:2bd7d3e8362f0e0d84fe4c3a06f1bd46fd2dbedbea76ac24e89f28439837a9e7"},
    {file = "boto3-1.26.6.tar.gz", hash = "sha256:4943faf38979ac445627390b431b0c08a73ccd5ecd46983e1d29cee454d14aaa"},
]
botocore = [
    {file = "botocore-1.29.6-py3-none-any.whl", hash = "sha256:bb595ed6a42ff85b4d5fb7a2e31b8584b87df46933a6f830fd98f91b1feea279"},
    {file = "botocore-1.29.6.tar.gz", hash = "sha256:574a9dc8b7cf1d866e6255c57af25de1f0da1babc6ce9faf05f227fd28ca905e"},
]
cachelib = [
    {file = "cachelib-0.9.0-py3-none-any.whl", hash = "sha256:811ceeb1209d2fe51cd2b62810bd1eccf70feba5c52641532498be5c675493b3"},
    {file = "cachelib-0.9.0.tar.gz", hash = "sha256:38222cc7c1b79a23606de5c2607f4925779e37cdcea1c2ad21b8bae94b5425a5"},
]
certifi = [
    {file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"},
    {file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"},
]
charset-normalizer = [
    {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"},
    {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"},
]
click = [
    {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"},
    {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"},
]
colorama = [
    {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
    {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
]
connector-aws = []
Flask = [
    {file = "Flask-2.2.2-py3-none-any.whl", hash = "sha256:b9c46cc36662a7949f34b52d8ec7bb59c0d74ba08ba6cb9ce9adc1d8676d9526"},
    {file = "Flask-2.2.2.tar.gz", hash = "sha256:642c450d19c4ad482f96729bd2a8f6d32554aa1e231f4f6b4e7e5264b16cca2b"},
]
Flask-OAuthlib = [
    {file = "Flask-OAuthlib-0.9.6.tar.gz", hash = "sha256:5bb79c8a8e670c2eb4cb553dfc3283b6c8d1202f674934676dc173cee94fe39c"},
    {file = "Flask_OAuthlib-0.9.6-py3-none-any.whl", hash = "sha256:a5c3b62959aa1922470a62b6ebf4273b75f1c29561a7eb4a69cde85d45a1d669"},
]
idna = [
    {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
    {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
]
itsdangerous = [
    {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"},
    {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"},
]
Jinja2 = [
    {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"},
    {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"},
]
jmespath = [
    {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"},
    {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"},
]
MarkupSafe = [
    {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"},
    {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"},
    {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"},
    {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"},
    {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"},
    {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"},
    {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"},
    {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"},
    {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"},
    {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"},
    {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"},
    {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"},
    {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"},
    {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"},
    {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"},
    {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"},
    {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"},
    {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"},
    {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"},
    {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"},
    {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"},
    {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"},
    {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"},
    {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"},
    {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"},
    {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"},
    {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"},
    {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"},
    {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"},
    {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"},
    {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"},
    {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"},
    {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"},
    {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"},
    {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"},
    {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"},
    {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"},
    {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"},
    {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"},
    {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"},
]
oauthlib = [
    {file = "oauthlib-2.1.0-py2.py3-none-any.whl", hash = "sha256:d883b36b21a6ad813953803edfa563b1b579d79ca758fe950d1bc9e8b326025b"},
    {file = "oauthlib-2.1.0.tar.gz", hash = "sha256:ac35665a61c1685c56336bda97d5eefa246f1202618a1d6f34fccb1bdd404162"},
]
python-dateutil = [
    {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
    {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
]
requests = [
    {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"},
    {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"},
]
requests-oauthlib = [
    {file = "requests-oauthlib-1.1.0.tar.gz", hash = "sha256:eabd8eb700ebed81ba080c6ead96d39d6bdc39996094bd23000204f6965786b0"},
    {file = "requests_oauthlib-1.1.0-py2.py3-none-any.whl", hash = "sha256:be76f2bb72ca5525998e81d47913e09b1ca8b7957ae89b46f787a79e68ad5e61"},
]
s3transfer = [
    {file = "s3transfer-0.6.0-py3-none-any.whl", hash = "sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd"},
    {file = "s3transfer-0.6.0.tar.gz", hash = "sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"},
]
simplejson = [
    {file = "simplejson-3.17.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a89acae02b2975b1f8e4974cb8cdf9bf9f6c91162fb8dec50c259ce700f2770a"},
    {file = "simplejson-3.17.6-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:82ff356ff91be0ab2293fc6d8d262451eb6ac4fd999244c4b5f863e049ba219c"},
    {file = "simplejson-3.17.6-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:0de783e9c2b87bdd75b57efa2b6260c24b94605b5c9843517577d40ee0c3cc8a"},
    {file = "simplejson-3.17.6-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:d24a9e61df7a7787b338a58abfba975414937b609eb6b18973e25f573bc0eeeb"},
    {file = "simplejson-3.17.6-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:e8603e691580487f11306ecb066c76f1f4a8b54fb3bdb23fa40643a059509366"},
    {file = "simplejson-3.17.6-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:9b01e7b00654115965a206e3015f0166674ec1e575198a62a977355597c0bef5"},
    {file = "simplejson-3.17.6-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:37bc0cf0e5599f36072077e56e248f3336917ded1d33d2688624d8ed3cefd7d2"},
    {file = "simplejson-3.17.6-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:cf6e7d5fe2aeb54898df18db1baf479863eae581cce05410f61f6b4188c8ada1"},
    {file = "simplejson-3.17.6-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:bdfc54b4468ed4cd7415928cbe782f4d782722a81aeb0f81e2ddca9932632211"},
    {file = "simplejson-3.17.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dd16302d39c4d6f4afde80edd0c97d4db643327d355a312762ccd9bd2ca515ed"},
    {file = "simplejson-3.17.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:deac4bdafa19bbb89edfb73b19f7f69a52d0b5bd3bb0c4ad404c1bbfd7b4b7fd"},
    {file = "simplejson-3.17.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8bbdb166e2fb816e43ab034c865147edafe28e1b19c72433147789ac83e2dda"},
    {file = "simplejson-3.17.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7854326920d41c3b5d468154318fe6ba4390cb2410480976787c640707e0180"},
    {file = "simplejson-3.17.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:04e31fa6ac8e326480703fb6ded1488bfa6f1d3f760d32e29dbf66d0838982ce"},
    {file = "simplejson-3.17.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f63600ec06982cdf480899026f4fda622776f5fabed9a869fdb32d72bc17e99a"},
    {file = "simplejson-3.17.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e03c3b8cc7883a54c3f34a6a135c4a17bc9088a33f36796acdb47162791b02f6"},
    {file = "simplejson-3.17.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a2d30d6c1652140181dc6861f564449ad71a45e4f165a6868c27d36745b65d40"},
    {file = "simplejson-3.17.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a1aa6e4cae8e3b8d5321be4f51c5ce77188faf7baa9fe1e78611f93a8eed2882"},
    {file = "simplejson-3.17.6-cp310-cp310-win32.whl", hash = "sha256:97202f939c3ff341fc3fa84d15db86156b1edc669424ba20b0a1fcd4a796a045"},
    {file = "simplejson-3.17.6-cp310-cp310-win_amd64.whl", hash = "sha256:80d3bc9944be1d73e5b1726c3bbfd2628d3d7fe2880711b1eb90b617b9b8ac70"},
    {file = "simplejson-3.17.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9fa621b3c0c05d965882c920347b6593751b7ab20d8fa81e426f1735ca1a9fc7"},
    {file = "simplejson-3.17.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd2fb11922f58df8528adfca123f6a84748ad17d066007e7ac977720063556bd"},
    {file = "simplejson-3.17.6-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:724c1fe135aa437d5126138d977004d165a3b5e2ee98fc4eb3e7c0ef645e7e27"},
    {file = "simplejson-3.17.6-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4ff4ac6ff3aa8f814ac0f50bf218a2e1a434a17aafad4f0400a57a8cc62ef17f"},
    {file = "simplejson-3.17.6-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:67093a526e42981fdd954868062e56c9b67fdd7e712616cc3265ad0c210ecb51"},
    {file = "simplejson-3.17.6-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:5d6b4af7ad7e4ac515bc6e602e7b79e2204e25dbd10ab3aa2beef3c5a9cad2c7"},
    {file = "simplejson-3.17.6-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:1c9b1ed7ed282b36571638297525f8ef80f34b3e2d600a56f962c6044f24200d"},
    {file = "simplejson-3.17.6-cp36-cp36m-win32.whl", hash = "sha256:632ecbbd2228575e6860c9e49ea3cc5423764d5aa70b92acc4e74096fb434044"},
    {file = "simplejson-3.17.6-cp36-cp36m-win_amd64.whl", hash = "sha256:4c09868ddb86bf79b1feb4e3e7e4a35cd6e61ddb3452b54e20cf296313622566"},
    {file = "simplejson-3.17.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4b6bd8144f15a491c662f06814bd8eaa54b17f26095bb775411f39bacaf66837"},
    {file = "simplejson-3.17.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5decdc78849617917c206b01e9fc1d694fd58caa961be816cb37d3150d613d9a"},
    {file = "simplejson-3.17.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:521877c7bd060470806eb6335926e27453d740ac1958eaf0d8c00911bc5e1802"},
    {file = "simplejson-3.17.6-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:65b998193bd7b0c7ecdfffbc825d808eac66279313cb67d8892bb259c9d91494"},
    {file = "simplejson-3.17.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ac786f6cb7aa10d44e9641c7a7d16d7f6e095b138795cd43503769d4154e0dc2"},
    {file = "simplejson-3.17.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3ff5b3464e1ce86a8de8c88e61d4836927d5595c2162cab22e96ff551b916e81"},
    {file = "simplejson-3.17.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:69bd56b1d257a91e763256d63606937ae4eb890b18a789b66951c00062afec33"},
    {file = "simplejson-3.17.6-cp37-cp37m-win32.whl", hash = "sha256:b81076552d34c27e5149a40187a8f7e2abb2d3185576a317aaf14aeeedad862a"},
    {file = "simplejson-3.17.6-cp37-cp37m-win_amd64.whl", hash = "sha256:07ecaafc1b1501f275bf5acdee34a4ad33c7c24ede287183ea77a02dc071e0c0"},
    {file = "simplejson-3.17.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:068670af975247acbb9fc3d5393293368cda17026db467bf7a51548ee8f17ee1"},
    {file = "simplejson-3.17.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4d1c135af0c72cb28dd259cf7ba218338f4dc027061262e46fe058b4e6a4c6a3"},
    {file = "simplejson-3.17.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:23fe704da910ff45e72543cbba152821685a889cf00fc58d5c8ee96a9bad5f94"},
    {file = "simplejson-3.17.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f444762fed1bc1fd75187ef14a20ed900c1fbb245d45be9e834b822a0223bc81"},
    {file = "simplejson-3.17.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:681eb4d37c9a9a6eb9b3245a5e89d7f7b2b9895590bb08a20aa598c1eb0a1d9d"},
    {file = "simplejson-3.17.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8e8607d8f6b4f9d46fee11447e334d6ab50e993dd4dbfb22f674616ce20907ab"},
    {file = "simplejson-3.17.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b10556817f09d46d420edd982dd0653940b90151d0576f09143a8e773459f6fe"},
    {file = "simplejson-3.17.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e1ec8a9ee0987d4524ffd6299e778c16cc35fef6d1a2764e609f90962f0b293a"},
    {file = "simplejson-3.17.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0b4126cac7d69ac06ff22efd3e0b3328a4a70624fcd6bca4fc1b4e6d9e2e12bf"},
    {file = "simplejson-3.17.6-cp38-cp38-win32.whl", hash = "sha256:35a49ebef25f1ebdef54262e54ae80904d8692367a9f208cdfbc38dbf649e00a"},
    {file = "simplejson-3.17.6-cp38-cp38-win_amd64.whl", hash = "sha256:743cd768affaa508a21499f4858c5b824ffa2e1394ed94eb85caf47ac0732198"},
    {file = "simplejson-3.17.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fb62d517a516128bacf08cb6a86ecd39fb06d08e7c4980251f5d5601d29989ba"},
    {file = "simplejson-3.17.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:12133863178a8080a3dccbf5cb2edfab0001bc41e5d6d2446af2a1131105adfe"},
    {file = "simplejson-3.17.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5540fba2d437edaf4aa4fbb80f43f42a8334206ad1ad3b27aef577fd989f20d9"},
    {file = "simplejson-3.17.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d74ee72b5071818a1a5dab47338e87f08a738cb938a3b0653b9e4d959ddd1fd9"},
    {file = "simplejson-3.17.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:28221620f4dcabdeac310846629b976e599a13f59abb21616356a85231ebd6ad"},
    {file = "simplejson-3.17.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b09bc62e5193e31d7f9876220fb429ec13a6a181a24d897b9edfbbdbcd678851"},
    {file = "simplejson-3.17.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7255a37ff50593c9b2f1afa8fafd6ef5763213c1ed5a9e2c6f5b9cc925ab979f"},
    {file = "simplejson-3.17.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:401d40969cee3df7bda211e57b903a534561b77a7ade0dd622a8d1a31eaa8ba7"},
    {file = "simplejson-3.17.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a649d0f66029c7eb67042b15374bd93a26aae202591d9afd71e111dd0006b198"},
    {file = "simplejson-3.17.6-cp39-cp39-win32.whl", hash = "sha256:522fad7be85de57430d6d287c4b635813932946ebf41b913fe7e880d154ade2e"},
    {file = "simplejson-3.17.6-cp39-cp39-win_amd64.whl", hash = "sha256:3fe87570168b2ae018391e2b43fbf66e8593a86feccb4b0500d134c998983ccc"},
    {file = "simplejson-3.17.6.tar.gz", hash = "sha256:cf98038d2abf63a1ada5730e91e84c642ba6c225b0198c3684151b1f80c5f8a6"},
]
six = [
    {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
    {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
]
spiffworkflow-proxy = []
urllib3 = [
    {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"},
    {file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"},
]
Werkzeug = [
    {file = "Werkzeug-2.2.2-py3-none-any.whl", hash = "sha256:f979ab81f58d7318e064e99c4506445d60135ac5cd2e177a2de0089bfd4c9bd5"},
    {file = "Werkzeug-2.2.2.tar.gz", hash = "sha256:7ea2d48322cc7c0f8b3a215ed73eabd7b5d75d0b50e31ab006286ccff9e00b8f"},
]

@ -0,0 +1,24 @@
[tool.poetry]
name = "connector-proxy-demo"
version = "0.1.0"
description = "An example showing how to use the Spiffworkflow-proxy's Flask Blueprint to make services available to Service Tasks."
authors = ["Dan <dan@sartography.com>"]
license = "LGPL"
readme = "README.md"
packages = [{include = "connector_proxy_demo", from = "src"}]

[tool.poetry.dependencies]
python = "^3.10"
Flask = "^2.2.2"
spiffworkflow-proxy = {git = "https://github.com/sartography/spiffworkflow-proxy"}
connector-aws = { git = "https://github.com/sartography/connector-aws.git"}


[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"

[tool.pytest.ini_options]
pythonpath = [
    ".", "src",
]

@ -2,4 +2,4 @@ pip==22.3.1
nox==2022.8.7
nox-poetry==1.0.2
poetry==1.2.2
virtualenv==20.16.6
virtualenv==20.16.7

@ -491,17 +491,20 @@ flake8 = "*"

[[package]]
name = "flake8-rst-docstrings"
version = "0.2.7"
description = "Python docstring reStructuredText (RST) validator"
version = "0.3.0"
description = "Python docstring reStructuredText (RST) validator for flake8"
category = "dev"
optional = false
python-versions = ">=3.7"

[package.dependencies]
flake8 = ">=3.0.0"
flake8 = ">=3"
pygments = "*"
restructuredtext-lint = "*"

[package.extras]
develop = ["build", "twine"]

[[package]]
name = "flask"
version = "2.2.2"

@ -595,7 +598,7 @@ tests = ["flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmallow-s

[[package]]
name = "flask-migrate"
version = "3.1.0"
version = "4.0.0"
description = "SQLAlchemy database migrations for Flask applications using Alembic."
category = "main"
optional = false

@ -916,7 +919,7 @@ mypy-extensions = "*"

[[package]]
name = "mypy"
version = "0.990"
version = "0.991"
description = "Optional static typing for Python"
category = "dev"
optional = false

@ -1501,7 +1504,7 @@ test = ["pytest"]
[[package]]
name = "SpiffWorkflow"
version = "1.2.1"
description = "A workflow framework and BPMN/DMN Processor"
description = ""
category = "main"
optional = false
python-versions = "*"

@ -1517,7 +1520,7 @@ lxml = "*"
type = "git"
url = "https://github.com/sartography/SpiffWorkflow"
reference = "main"
resolved_reference = "580939cc8cb0b7ade1571483bd1e28f554434ac4"
resolved_reference = "025bc30f27366e06dd1286b7563e4b1cb04c1c46"

[[package]]
name = "sqlalchemy"

@ -1767,7 +1770,7 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>=
[metadata]
lock-version = "1.1"
python-versions = "^3.7"
content-hash = "59fd1a96fd43ad04e57500a4a2f9c2a8b09279872f0277736d52a827634977a6"
content-hash = "6dfda037ebb3024834a45670108756a3057fff1b6fb5b916d222d3a162509b7d"

[metadata.files]
alabaster = [

@ -2045,8 +2048,8 @@ flake8-polyfill = [
    {file = "flake8_polyfill-1.0.2-py2.py3-none-any.whl", hash = "sha256:12be6a34ee3ab795b19ca73505e7b55826d5f6ad7230d31b18e106400169b9e9"},
]
flake8-rst-docstrings = [
    {file = "flake8-rst-docstrings-0.2.7.tar.gz", hash = "sha256:2740067ab9237559dd45a3434d8c987792c7b259ca563621a3b95efe201f5382"},
    {file = "flake8_rst_docstrings-0.2.7-py3-none-any.whl", hash = "sha256:5d56075dce360bcc9c6775bfe7cb431aa395de600ca7e8d40580a28d50b2a803"},
    {file = "flake8-rst-docstrings-0.3.0.tar.gz", hash = "sha256:d1ce22b4bd37b73cd86b8d980e946ef198cfcc18ed82fedb674ceaa2f8d1afa4"},
    {file = "flake8_rst_docstrings-0.3.0-py3-none-any.whl", hash = "sha256:f8c3c6892ff402292651c31983a38da082480ad3ba253743de52989bdc84ca1c"},
]
flask = [
    {file = "Flask-2.2.2-py3-none-any.whl", hash = "sha256:b9c46cc36662a7949f34b52d8ec7bb59c0d74ba08ba6cb9ce9adc1d8676d9526"},

@ -2071,8 +2074,8 @@ flask-marshmallow = [
    {file = "flask_marshmallow-0.14.0-py2.py3-none-any.whl", hash = "sha256:2adcd782b5a4a6c5ae3c96701f320d8ca6997995a52b2661093c56cc3ed24754"},
]
flask-migrate = [
    {file = "Flask-Migrate-3.1.0.tar.gz", hash = "sha256:57d6060839e3a7f150eaab6fe4e726d9e3e7cffe2150fb223d73f92421c6d1d9"},
    {file = "Flask_Migrate-3.1.0-py3-none-any.whl", hash = "sha256:a6498706241aba6be7a251078de9cf166d74307bca41a4ca3e403c9d39e2f897"},
    {file = "Flask-Migrate-4.0.0.tar.gz", hash = "sha256:2a301c3040af6844f29d9149abe428a0f08ebc8fa149e72113bbb36fa341920a"},
    {file = "Flask_Migrate-4.0.0-py3-none-any.whl", hash = "sha256:e75a46b657e3062296b9f6e92f31e19662f76cfee8abd6ae94640cbcb79fe016"},
]
flask-restful = [
    {file = "Flask-RESTful-0.3.9.tar.gz", hash = "sha256:ccec650b835d48192138c85329ae03735e6ced58e9b2d9c2146d6c84c06fa53e"},

@ -2391,36 +2394,36 @@ monkeytype = [
    {file = "MonkeyType-22.2.0.tar.gz", hash = "sha256:6b0c00b49dcc5095a2c08d28246cf005e05673fc51f64d203f9a6bca2036dfab"},
]
mypy = [
    {file = "mypy-0.990-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:aaf1be63e0207d7d17be942dcf9a6b641745581fe6c64df9a38deb562a7dbafa"},
    {file = "mypy-0.990-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d555aa7f44cecb7ea3c0ac69d58b1a5afb92caa017285a8e9c4efbf0518b61b4"},
    {file = "mypy-0.990-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f694d6d09a460b117dccb6857dda269188e3437c880d7b60fa0014fa872d1e9"},
    {file = "mypy-0.990-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:269f0dfb6463b8780333310ff4b5134425157ef0d2b1d614015adaf6d6a7eabd"},
    {file = "mypy-0.990-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8798c8ed83aa809f053abff08664bdca056038f5a02af3660de00b7290b64c47"},
    {file = "mypy-0.990-cp310-cp310-win_amd64.whl", hash = "sha256:47a9955214615108c3480a500cfda8513a0b1cd3c09a1ed42764ca0dd7b931dd"},
    {file = "mypy-0.990-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4a8a6c10f4c63fbf6ad6c03eba22c9331b3946a4cec97f008e9ffb4d3b31e8e2"},
    {file = "mypy-0.990-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cd2dd3730ba894ec2a2082cc703fbf3e95a08479f7be84912e3131fc68809d46"},
    {file = "mypy-0.990-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7da0005e47975287a92b43276e460ac1831af3d23032c34e67d003388a0ce8d0"},
    {file = "mypy-0.990-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:262c543ef24deb10470a3c1c254bb986714e2b6b1a67d66daf836a548a9f316c"},
    {file = "mypy-0.990-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3ff201a0c6d3ea029d73b1648943387d75aa052491365b101f6edd5570d018ea"},
    {file = "mypy-0.990-cp311-cp311-win_amd64.whl", hash = "sha256:1767830da2d1afa4e62b684647af0ff79b401f004d7fa08bc5b0ce2d45bcd5ec"},
    {file = "mypy-0.990-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6826d9c4d85bbf6d68cb279b561de6a4d8d778ca8e9ab2d00ee768ab501a9852"},
    {file = "mypy-0.990-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46897755f944176fbc504178422a5a2875bbf3f7436727374724842c0987b5af"},
    {file = "mypy-0.990-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0680389c34284287fe00e82fc8bccdea9aff318f7e7d55b90d967a13a9606013"},
    {file = "mypy-0.990-cp37-cp37m-win_amd64.whl", hash = "sha256:b08541a06eed35b543ae1a6b301590eb61826a1eb099417676ddc5a42aa151c5"},
    {file = "mypy-0.990-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:be88d665e76b452c26fb2bdc3d54555c01226fba062b004ede780b190a50f9db"},
    {file = "mypy-0.990-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9b8f4a8213b1fd4b751e26b59ae0e0c12896568d7e805861035c7a15ed6dc9eb"},
    {file = "mypy-0.990-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2b6f85c2ad378e3224e017904a051b26660087b3b76490d533b7344f1546d3ff"},
    {file = "mypy-0.990-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ee5f99817ee70254e7eb5cf97c1b11dda29c6893d846c8b07bce449184e9466"},
    {file = "mypy-0.990-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49082382f571c3186ce9ea0bd627cb1345d4da8d44a8377870f4442401f0a706"},
    {file = "mypy-0.990-cp38-cp38-win_amd64.whl", hash = "sha256:aba38e3dd66bdbafbbfe9c6e79637841928ea4c79b32e334099463c17b0d90ef"},
    {file = "mypy-0.990-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9d851c09b981a65d9d283a8ccb5b1d0b698e580493416a10942ef1a04b19fd37"},
    {file = "mypy-0.990-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d847dd23540e2912d9667602271e5ebf25e5788e7da46da5ffd98e7872616e8e"},
    {file = "mypy-0.990-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cc6019808580565040cd2a561b593d7c3c646badd7e580e07d875eb1bf35c695"},
    {file = "mypy-0.990-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a3150d409609a775c8cb65dbe305c4edd7fe576c22ea79d77d1454acd9aeda8"},
    {file = "mypy-0.990-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3227f14fe943524f5794679156488f18bf8d34bfecd4623cf76bc55958d229c5"},
    {file = "mypy-0.990-cp39-cp39-win_amd64.whl", hash = "sha256:c76c769c46a1e6062a84837badcb2a7b0cdb153d68601a61f60739c37d41cc74"},
    {file = "mypy-0.990-py3-none-any.whl", hash = "sha256:8f1940325a8ed460ba03d19ab83742260fa9534804c317224e5d4e5aa588e2d6"},
    {file = "mypy-0.990.tar.gz", hash = "sha256:72382cb609142dba3f04140d016c94b4092bc7b4d98ca718740dc989e5271b8d"},
    {file = "mypy-0.991-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7d17e0a9707d0772f4a7b878f04b4fd11f6f5bcb9b3813975a9b13c9332153ab"},
|
||||
{file = "mypy-0.991-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d"},
|
||||
{file = "mypy-0.991-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c8f3be99e8a8bd403caa8c03be619544bc2c77a7093685dcf308c6b109426c6"},
|
||||
{file = "mypy-0.991-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc9ec663ed6c8f15f4ae9d3c04c989b744436c16d26580eaa760ae9dd5d662eb"},
|
||||
{file = "mypy-0.991-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4307270436fd7694b41f913eb09210faff27ea4979ecbcd849e57d2da2f65305"},
|
||||
{file = "mypy-0.991-cp310-cp310-win_amd64.whl", hash = "sha256:901c2c269c616e6cb0998b33d4adbb4a6af0ac4ce5cd078afd7bc95830e62c1c"},
|
||||
{file = "mypy-0.991-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d13674f3fb73805ba0c45eb6c0c3053d218aa1f7abead6e446d474529aafc372"},
|
||||
{file = "mypy-0.991-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1c8cd4fb70e8584ca1ed5805cbc7c017a3d1a29fb450621089ffed3e99d1857f"},
|
||||
{file = "mypy-0.991-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:209ee89fbb0deed518605edddd234af80506aec932ad28d73c08f1400ef80a33"},
|
||||
{file = "mypy-0.991-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37bd02ebf9d10e05b00d71302d2c2e6ca333e6c2a8584a98c00e038db8121f05"},
|
||||
{file = "mypy-0.991-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:26efb2fcc6b67e4d5a55561f39176821d2adf88f2745ddc72751b7890f3194ad"},
|
||||
{file = "mypy-0.991-cp311-cp311-win_amd64.whl", hash = "sha256:3a700330b567114b673cf8ee7388e949f843b356a73b5ab22dd7cff4742a5297"},
|
||||
{file = "mypy-0.991-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1f7d1a520373e2272b10796c3ff721ea1a0712288cafaa95931e66aa15798813"},
|
||||
{file = "mypy-0.991-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:641411733b127c3e0dab94c45af15fea99e4468f99ac88b39efb1ad677da5711"},
|
||||
{file = "mypy-0.991-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3d80e36b7d7a9259b740be6d8d906221789b0d836201af4234093cae89ced0cd"},
|
||||
{file = "mypy-0.991-cp37-cp37m-win_amd64.whl", hash = "sha256:e62ebaad93be3ad1a828a11e90f0e76f15449371ffeecca4a0a0b9adc99abcef"},
|
||||
{file = "mypy-0.991-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b86ce2c1866a748c0f6faca5232059f881cda6dda2a893b9a8373353cfe3715a"},
|
||||
{file = "mypy-0.991-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac6e503823143464538efda0e8e356d871557ef60ccd38f8824a4257acc18d93"},
|
||||
{file = "mypy-0.991-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0cca5adf694af539aeaa6ac633a7afe9bbd760df9d31be55ab780b77ab5ae8bf"},
|
||||
{file = "mypy-0.991-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a12c56bf73cdab116df96e4ff39610b92a348cc99a1307e1da3c3768bbb5b135"},
|
||||
{file = "mypy-0.991-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:652b651d42f155033a1967739788c436491b577b6a44e4c39fb340d0ee7f0d70"},
|
||||
{file = "mypy-0.991-cp38-cp38-win_amd64.whl", hash = "sha256:4175593dc25d9da12f7de8de873a33f9b2b8bdb4e827a7cae952e5b1a342e243"},
|
||||
{file = "mypy-0.991-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:98e781cd35c0acf33eb0295e8b9c55cdbef64fcb35f6d3aa2186f289bed6e80d"},
|
||||
{file = "mypy-0.991-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6d7464bac72a85cb3491c7e92b5b62f3dcccb8af26826257760a552a5e244aa5"},
|
||||
{file = "mypy-0.991-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c9166b3f81a10cdf9b49f2d594b21b31adadb3d5e9db9b834866c3258b695be3"},
|
||||
{file = "mypy-0.991-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8472f736a5bfb159a5e36740847808f6f5b659960115ff29c7cecec1741c648"},
|
||||
{file = "mypy-0.991-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e80e758243b97b618cdf22004beb09e8a2de1af481382e4d84bc52152d1c476"},
|
||||
{file = "mypy-0.991-cp39-cp39-win_amd64.whl", hash = "sha256:74e259b5c19f70d35fcc1ad3d56499065c601dfe94ff67ae48b85596b9ec1461"},
|
||||
{file = "mypy-0.991-py3-none-any.whl", hash = "sha256:de32edc9b0a7e67c2775e574cb061a537660e51210fbf6006b0b36ea695ae9bb"},
|
||||
{file = "mypy-0.991.tar.gz", hash = "sha256:3c0165ba8f354a6d9881809ef29f1a9318a236a6d81c690094c5df32107bde06"},
|
||||
]
|
||||
mypy-extensions = [
|
||||
{file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
|
||||
|
|
|
@@ -37,7 +37,7 @@ greenlet = "^2.0.1"
 pytest = "^6.2.5"
 coverage = {extras = ["toml"], version = "^6.5"}
 safety = "^2.3.1"
-mypy = "^0.990"
+mypy = "^0.991"
 typeguard = "^2.13.2"
 xdoctest = {extras = ["colors"], version = "^1.1.0"}
 sphinx = "^4.3.0"
@@ -55,7 +55,7 @@ bandit = "1.7.2"
 
 flake8-bugbear = "^22.10.27"
 flake8-docstrings = "^1.6.0"
-flake8-rst-docstrings = "^0.2.7"
+flake8-rst-docstrings = "^0.3.0"
 pep8-naming = "^0.13.2"
 darglint = "^1.8.1"
 reorder-python-imports = "^3.9.0"
@@ -1,22 +0,0 @@
-"""Updates all JSON files, based on the current state of BPMN_SPEC_ABSOLUTE_DIR."""
-from spiffworkflow_backend import get_hacked_up_app_for_script
-from spiffworkflow_backend.services.process_model_service import ProcessModelService
-
-
-def main() -> None:
-    """Main."""
-    app = get_hacked_up_app_for_script()
-    with app.app_context():
-
-        groups = ProcessModelService().get_process_groups()
-        for group in groups:
-            for process_model in group.process_models:
-                update_items = {
-                    "process_group_id": "",
-                    "id": f"{group.id}/{process_model.id}",
-                }
-                ProcessModelService().update_spec(process_model, update_items)
-
-
-if __name__ == "__main__":
-    main()
@@ -1,8 +1,8 @@
 """empty message
 
-Revision ID: b7790c9c8174
+Revision ID: 70223f5c7b98
 Revises:
-Create Date: 2022-11-15 14:11:47.309399
+Create Date: 2022-11-20 19:54:45.061376
 
 """
 from alembic import op
@@ -10,7 +10,7 @@ import sqlalchemy as sa
 
 
 # revision identifiers, used by Alembic.
-revision = 'b7790c9c8174'
+revision = '70223f5c7b98'
 down_revision = None
 branch_labels = None
 depends_on = None
@@ -44,12 +44,12 @@ python-versions = "*"
 dev = ["black", "coverage", "isort", "pre-commit", "pyenchant", "pylint"]
 
 [[package]]
-name = "APScheduler"
-version = "3.9.1"
+name = "apscheduler"
+version = "3.9.1.post1"
 description = "In-process task scheduler with Cron-like capabilities"
 category = "main"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
+python-versions = "!=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
 
 [package.dependencies]
 pytz = "*"
@@ -477,6 +477,17 @@ six = ">=1.9.0"
 gmpy = ["gmpy"]
 gmpy2 = ["gmpy2"]
 
+[[package]]
+name = "exceptiongroup"
+version = "1.0.4"
+description = "Backport of PEP 654 (exception groups)"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+
+[package.extras]
+test = ["pytest (>=6)"]
+
 [[package]]
 name = "filelock"
 version = "3.8.0"
@@ -929,22 +940,6 @@ category = "main"
 optional = false
 python-versions = ">=3.6"
 
-[[package]]
-name = "libcst"
-version = "0.4.7"
-description = "A concrete syntax tree with AST-like properties for Python 3.5, 3.6, 3.7, 3.8, 3.9, and 3.10 programs."
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-pyyaml = ">=5.2"
-typing-extensions = ">=3.7.4.2"
-typing-inspect = ">=0.4.0"
-
-[package.extras]
-dev = ["black (==22.3.0)", "coverage (>=4.5.4)", "fixit (==0.1.1)", "flake8 (>=3.7.8)", "hypothesis (>=4.36.0)", "hypothesmith (>=0.0.4)", "jinja2 (==3.0.3)", "jupyter (>=1.0.0)", "maturin (>=0.8.3,<0.9)", "nbsphinx (>=0.4.2)", "prompt-toolkit (>=2.0.9)", "pyre-check (==0.9.9)", "setuptools-rust (>=0.12.1)", "setuptools-scm (>=6.0.1)", "slotscheck (>=0.7.1)", "sphinx-rtd-theme (>=0.4.3)", "ufmt (==1.3)", "usort (==1.0.0rc1)"]
-
 [[package]]
 name = "livereload"
 version = "2.6.3"
@@ -1050,18 +1045,6 @@ category = "dev"
 optional = false
 python-versions = "*"
 
-[[package]]
-name = "MonkeyType"
-version = "22.2.0"
-description = "Generating type annotations from sampled production types"
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-
-[package.dependencies]
-libcst = ">=0.3.7"
-mypy-extensions = "*"
-
 [[package]]
 name = "mypy"
 version = "0.982"
@@ -1240,14 +1223,6 @@ category = "main"
 optional = false
 python-versions = ">=3.6"
 
-[[package]]
-name = "py"
-version = "1.11.0"
-description = "library with cross-python path, ini-parsing, io, code, log facilities"
-category = "main"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
-
 [[package]]
 name = "pyasn1"
 version = "0.4.8"
@@ -1332,7 +1307,7 @@ python-versions = ">=3.7"
 
 [[package]]
 name = "pytest"
-version = "7.1.3"
+version = "7.2.0"
 description = "pytest: simple powerful testing with Python"
 category = "main"
 optional = false
@@ -1341,11 +1316,11 @@ python-versions = ">=3.7"
 [package.dependencies]
 attrs = ">=19.2.0"
 colorama = {version = "*", markers = "sys_platform == \"win32\""}
+exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
 iniconfig = "*"
 packaging = "*"
 pluggy = ">=0.12,<2.0"
-py = ">=1.8.2"
-tomli = ">=1.0.0"
+tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
 
 [package.extras]
 testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"]
@@ -1876,7 +1851,7 @@ lxml = "*"
 type = "git"
 url = "https://github.com/sartography/SpiffWorkflow"
 reference = "main"
-resolved_reference = "eea53c912984d21a064330c3b3334ac219cb8e18"
+resolved_reference = "46f410a2852baeedc8f9ac5165347ce6d4470594"
 
 [[package]]
 name = "SQLAlchemy"
@@ -2094,18 +2069,6 @@ category = "main"
 optional = false
 python-versions = ">=3.7"
 
-[[package]]
-name = "typing-inspect"
-version = "0.8.0"
-description = "Runtime inspection utilities for typing module."
-category = "dev"
-optional = false
-python-versions = "*"
-
-[package.dependencies]
-mypy-extensions = ">=0.3.0"
-typing-extensions = ">=3.7.4"
-
 [[package]]
 name = "tzdata"
 version = "2022.5"
@@ -2259,7 +2222,7 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools"
 [metadata]
 lock-version = "1.1"
 python-versions = ">=3.9,<3.12"
-content-hash = "a6d3882a3ab142b82201b83ee8a0552fd16112c4540e2a1dbcb5c38599b917c1"
+content-hash = "bbbd1c8bdce7f3dd7ec17c62b85dc7c95045fe500a759bb1a89c93add58a2a25"
 
 [metadata.files]
 alabaster = [
@@ -2278,9 +2241,9 @@ aniso8601 = [
     {file = "aniso8601-9.0.1-py2.py3-none-any.whl", hash = "sha256:1d2b7ef82963909e93c4f24ce48d4de9e66009a21bf1c1e1c85bdd0812fe412f"},
     {file = "aniso8601-9.0.1.tar.gz", hash = "sha256:72e3117667eedf66951bb2d93f4296a56b94b078a8a95905a052611fb3f1b973"},
 ]
-APScheduler = [
-    {file = "APScheduler-3.9.1-py2.py3-none-any.whl", hash = "sha256:ddc25a0ddd899de44d7f451f4375fb971887e65af51e41e5dcf681f59b8b2c9a"},
-    {file = "APScheduler-3.9.1.tar.gz", hash = "sha256:65e6574b6395498d371d045f2a8a7e4f7d50c6ad21ef7313d15b1c7cf20df1e3"},
+apscheduler = [
+    {file = "APScheduler-3.9.1.post1-py2.py3-none-any.whl", hash = "sha256:c8c618241dbb2785ed5a687504b14cb1851d6f7b5a4edf3a51e39cc6a069967a"},
+    {file = "APScheduler-3.9.1.post1.tar.gz", hash = "sha256:b2bea0309569da53a7261bfa0ce19c67ddbfe151bda776a6a907579fdbd3eb2a"},
 ]
 astroid = [
     {file = "astroid-2.12.12-py3-none-any.whl", hash = "sha256:72702205200b2a638358369d90c222d74ebc376787af8fb2f7f2a86f7b5cc85f"},
@@ -2484,6 +2447,10 @@ ecdsa = [
     {file = "ecdsa-0.18.0-py2.py3-none-any.whl", hash = "sha256:80600258e7ed2f16b9aa1d7c295bd70194109ad5a30fdee0eaeefef1d4c559dd"},
     {file = "ecdsa-0.18.0.tar.gz", hash = "sha256:190348041559e21b22a1d65cee485282ca11a6f81d503fddb84d5017e9ed1e49"},
 ]
+exceptiongroup = [
+    {file = "exceptiongroup-1.0.4-py3-none-any.whl", hash = "sha256:542adf9dea4055530d6e1279602fa5cb11dab2395fa650b8674eaec35fc4a828"},
+    {file = "exceptiongroup-1.0.4.tar.gz", hash = "sha256:bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec"},
+]
 filelock = [
     {file = "filelock-3.8.0-py3-none-any.whl", hash = "sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4"},
     {file = "filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"},
@@ -2703,32 +2670,6 @@ lazy-object-proxy = [
     {file = "lazy_object_proxy-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:677ea950bef409b47e51e733283544ac3d660b709cfce7b187f5ace137960d61"},
     {file = "lazy_object_proxy-1.7.1-pp37.pp38-none-any.whl", hash = "sha256:d66906d5785da8e0be7360912e99c9188b70f52c422f9fc18223347235691a84"},
 ]
-libcst = [
-    {file = "libcst-0.4.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dc6f8965b6ca68d47e11321772887d81fa6fd8ea86e6ef87434ca2147de10747"},
-    {file = "libcst-0.4.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8f47d809df59fcd83058b777b86a300154ee3a1f1b0523a398a67b5f8affd4c"},
-    {file = "libcst-0.4.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0d19de56aa733b4ef024527e3ce4896d4b0e9806889797f409ec24caa651a44"},
-    {file = "libcst-0.4.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31da97bc986dc3f7a97f7d431fa911932aaf716d2f8bcda947fc964afd3b57cd"},
-    {file = "libcst-0.4.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:71b2e2c5e33e53669c20de0853cecfac1ffb8657ee727ab8527140f39049b820"},
-    {file = "libcst-0.4.7-cp310-cp310-win_amd64.whl", hash = "sha256:76fae68bd6b7ce069e267b3322c806b4305341cea78d161ae40e0ed641c8c660"},
-    {file = "libcst-0.4.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:bac76d69980bb3254f503f52128c256ef4d1bcbaabe4a17c3a9ebcd1fc0472c0"},
-    {file = "libcst-0.4.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f86535271eaefe84a99736875566a038449f92e1a2a61ea0b588d8359fbefd"},
-    {file = "libcst-0.4.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:617f7fa2610a8c86cf22d8d03416f25391383d05bd0ad1ca8ef68023ddd6b4f6"},
-    {file = "libcst-0.4.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3637fffe476c5b4ee2225c6474b83382518f2c1b2fe4771039e06bdd7835a4a"},
-    {file = "libcst-0.4.7-cp37-cp37m-win_amd64.whl", hash = "sha256:f56565124c2541adee0634e411b2126b3f335306d19e91ed2bfe52efa698b219"},
-    {file = "libcst-0.4.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0ca2771ff3cfdf1f148349f89fcae64afa365213ed5c2703a69a89319325d0c8"},
-    {file = "libcst-0.4.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:aa438131b7befc7e5a3cbadb5a7b1506305de5d62262ea0556add0152f40925e"},
-    {file = "libcst-0.4.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c6bd66a8be2ffad7b968d90dae86c62fd4739c0e011d71f3e76544a891ae743"},
-    {file = "libcst-0.4.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:214a9c4f4f90cd5b4bfa18e17877da4dd9a896821d9af9be86fa3effdc289b9b"},
-    {file = "libcst-0.4.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27a37f2b459a8b51a41e260bd89c24ae41ab1d658f610c91650c79b1bbf27138"},
-    {file = "libcst-0.4.7-cp38-cp38-win_amd64.whl", hash = "sha256:2f6766391d90472f036b88a95251c87d498ab068c377724f212ab0cc20509a68"},
-    {file = "libcst-0.4.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:234293aa8681a3d47fef1716c5622797a81cbe85a9381fe023815468cfe20eed"},
-    {file = "libcst-0.4.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fa618dc359663a0a097c633452b104c1ca93365da7a811e655c6944f6b323239"},
-    {file = "libcst-0.4.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3569d9901c18940632414fb7a0943bffd326db9f726a9c041664926820857815"},
-    {file = "libcst-0.4.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beb5347e46b419f782589da060e9300957e71d561aa5574309883b71f93c1dfe"},
-    {file = "libcst-0.4.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e541ccfeebda1ae5f005fc120a5bf3e8ac9ccfda405ec3efd3df54fc4688ac3"},
-    {file = "libcst-0.4.7-cp39-cp39-win_amd64.whl", hash = "sha256:3a2b7253cd2e3f0f8a3e23b5c2acb492811d865ef36e0816091c925f32b713d2"},
-    {file = "libcst-0.4.7.tar.gz", hash = "sha256:95c52c2130531f6e726a3b077442cfd486975435fecf3db8224d43fba7b85099"},
-]
 livereload = [
     {file = "livereload-2.6.3.tar.gz", hash = "sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869"},
 ]
@@ -2866,10 +2807,6 @@ mccabe = [
     {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
     {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"},
 ]
-MonkeyType = [
-    {file = "MonkeyType-22.2.0-py3-none-any.whl", hash = "sha256:3d0815c7e98a18e9267990a452548247f6775fd636e65df5a7d77100ea7ad282"},
-    {file = "MonkeyType-22.2.0.tar.gz", hash = "sha256:6b0c00b49dcc5095a2c08d28246cf005e05673fc51f64d203f9a6bca2036dfab"},
-]
 mypy = [
     {file = "mypy-0.982-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5085e6f442003fa915aeb0a46d4da58128da69325d8213b4b35cc7054090aed5"},
     {file = "mypy-0.982-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:41fd1cf9bc0e1c19b9af13a6580ccb66c381a5ee2cf63ee5ebab747a4badeba3"},
@@ -3051,10 +2988,6 @@ psycopg2 = [
     {file = "psycopg2-2.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:849bd868ae3369932127f0771c08d1109b254f08d48dc42493c3d1b87cb2d308"},
     {file = "psycopg2-2.9.4.tar.gz", hash = "sha256:d529926254e093a1b669f692a3aa50069bc71faf5b0ecd91686a78f62767d52f"},
 ]
-py = [
-    {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
-    {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
-]
 pyasn1 = [
     {file = "pyasn1-0.4.8-py2.4.egg", hash = "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"},
     {file = "pyasn1-0.4.8-py2.5.egg", hash = "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"},
@@ -3118,8 +3051,8 @@ pyrsistent = [
     {file = "pyrsistent-0.18.1.tar.gz", hash = "sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96"},
 ]
 pytest = [
-    {file = "pytest-7.1.3-py3-none-any.whl", hash = "sha256:1377bda3466d70b55e3f5cecfa55bb7cfcf219c7964629b967c37cf0bda818b7"},
-    {file = "pytest-7.1.3.tar.gz", hash = "sha256:4f365fec2dff9c1162f834d9f18af1ba13062db0c708bf7b946f8a5c76180c39"},
+    {file = "pytest-7.2.0-py3-none-any.whl", hash = "sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71"},
+    {file = "pytest-7.2.0.tar.gz", hash = "sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59"},
 ]
 pytest-flask = [
     {file = "pytest-flask-1.2.0.tar.gz", hash = "sha256:46fde652f77777bf02dc91205aec4ce20cdf2acbbbd66a918ab91f5c14693d3d"},
@@ -3598,10 +3531,6 @@ typing-extensions = [
     {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"},
     {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"},
 ]
-typing-inspect = [
-    {file = "typing_inspect-0.8.0-py3-none-any.whl", hash = "sha256:5fbf9c1e65d4fa01e701fe12a5bca6c6e08a4ffd5bc60bfac028253a447c5188"},
-    {file = "typing_inspect-0.8.0.tar.gz", hash = "sha256:8b1ff0c400943b6145df8119c41c244ca8207f1f10c9c057aeed1560e4806e3d"},
-]
 tzdata = [
     {file = "tzdata-2022.5-py2.py3-none-any.whl", hash = "sha256:323161b22b7802fdc78f20ca5f6073639c64f1a7227c40cd3e19fd1d0ce6650a"},
     {file = "tzdata-2022.5.tar.gz", hash = "sha256:e15b2b3005e2546108af42a0eb4ccab4d9e225e2dfbf4f77aad50c70a4b1f3ab"},
@@ -33,7 +33,7 @@ sentry-sdk = "^1.10"
 sphinx-autoapi = "^2.0"
 flask-bpmn = {git = "https://github.com/sartography/flask-bpmn", rev = "main"}
 # flask-bpmn = {develop = true, path = "../flask-bpmn"}
-mysql-connector-python = "^8.0.29"
+mysql-connector-python = "*"
 pytest-flask = "^1.2.0"
 pytest-flask-sqlalchemy = "^1.1.0"
 psycopg2 = "^2.9.3"
@@ -45,7 +45,7 @@ marshmallow-sqlalchemy = "^0.28.0"
 PyJWT = "^2.6.0"
 gunicorn = "^20.1.0"
 python-keycloak = "^2.5.0"
-APScheduler = "^3.9.1"
+APScheduler = "*"
 Jinja2 = "^3.1.2"
 RestrictedPython = "^6.0"
 Flask-SQLAlchemy = "^3"
@@ -75,7 +75,7 @@ types-dateparser = "^1.1.4.1"
 
 
 [tool.poetry.dev-dependencies]
-pytest = "^7.1.2"
+pytest = "*"
 coverage = {extras = ["toml"], version = "^6.1"}
 safety = "^2.3.1"
 mypy = ">=0.961"
@@ -103,7 +103,6 @@ sphinx-click = "^4.3.0"
 Pygments = "^2.10.0"
 pyupgrade = "^3.1.0"
 furo = ">=2021.11.12"
-MonkeyType = "^22.2.0"
 
 [tool.poetry.scripts]
 spiffworkflow-backend = "spiffworkflow_backend.__main__:main"
@@ -67,9 +67,9 @@ def start_scheduler(
         seconds=10,
     )
     scheduler.add_job(
-        BackgroundProcessingService(app).run,
+        BackgroundProcessingService(app).process_waiting_process_instances,
         "interval",
-        seconds=30,
+        seconds=10,
     )
     scheduler.start()
 
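
The hunk above swaps the scheduled callable for the renamed method and tightens the second job's interval from 30 to 10 seconds; both jobs use APScheduler's interval trigger. A minimal standalone sketch of that wiring, with a made-up job function in place of the real service methods:

# Sketch only; assumes the apscheduler package is installed.
from apscheduler.schedulers.background import BackgroundScheduler


def poll_waiting_work() -> None:  # hypothetical stand-in for the service methods
    print("checking for waiting process instances")


scheduler = BackgroundScheduler()
# An "interval" trigger re-runs the callable every `seconds` seconds.
scheduler.add_job(poll_waiting_work, "interval", seconds=10)
scheduler.start()
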
@@ -238,6 +238,33 @@ paths:
           schema:
             $ref: "#/components/schemas/ProcessModelCategory"
 
+  /process-groups/{modified_process_group_identifier}/move:
+    parameters:
+      - name: modified_process_group_identifier
+        in: path
+        required: true
+        description: The unique id of an existing process group.
+        schema:
+          type: string
+      - name: new_location
+        in: query
+        required: true
+        description: the new location, as an existing process group id
+        schema:
+          type: string
+    put:
+      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_group_move
+      summary: returns the new group
+      tags:
+        - Process Groups
+      responses:
+        "200":
+          description: Process Group
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/ProcessModelCategory"
+
   /process-models:
     parameters:
       - name: process_group_identifier
@@ -246,6 +273,12 @@ paths:
         description: The group containing the models we want to return
         schema:
          type: string
+      - name: recursive
+        in: query
+        required: false
+        description: Get all sub process models recursively if true
+        schema:
+          type: string
       - name: page
         in: query
         required: false
@@ -274,6 +307,13 @@ paths:
               $ref: "#/components/schemas/ProcessModel"
 
+  /process-models/{modified_process_group_id}:
+    parameters:
+      - name: modified_process_group_id
+        in: path
+        required: true
+        description: modified id of an existing process group
+        schema:
+          type: string
+    post:
       operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_create
       summary: Creates a new process model with the given parameters.
@@ -372,11 +412,40 @@ paths:
           schema:
             $ref: "#/components/schemas/OkTrue"
 
+  /process-models/{modified_process_model_identifier}/move:
+    parameters:
+      - name: modified_process_model_identifier
+        in: path
+        required: true
+        description: the modified process model id
+        schema:
+          type: string
+      - name: new_location
+        in: query
+        required: true
+        description: the new location for the process model, as a process group id
+        schema:
+          type: string
+    put:
+      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_move
+      summary: returns the new model
+      tags:
+        - Process Models
+      responses:
+        "200":
+          description: Process Model
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/ProcessModel"
+
+
   /processes:
     get:
       operationId: spiffworkflow_backend.routes.process_api_blueprint.process_list
-      summary: Return a list of all processes (not just primary process of a process model)
-        useful for finding processes for call activites.
+      summary:
+        Return a list of all processes (not just primary process of a process model)
+        useful for finding processes for call activites.
       tags:
         - Process Models
      responses:
@@ -445,7 +514,12 @@ paths:
         description: For filtering - indicates the user has manually entered a query
         schema:
           type: boolean
       # process_instance_list
+      - name: report_identifier
+        in: query
+        required: false
+        description: Specifies the identifier of a report to use, if any
+        schema:
+          type: string
     get:
       operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_list
       summary: Returns a list of process instances for a given process model
@@ -984,6 +1058,12 @@ paths:
 
   /process-instances/{modified_process_model_id}/{process_instance_id}/tasks:
     parameters:
+      - name: modified_process_model_id
+        in: path
+        required: true
+        description: The modified id of an existing process model
+        schema:
+          type: string
       - name: process_instance_id
         in: path
         required: true
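
For context, the new move endpoints above take the thing to move in the path and the destination group as a `new_location` query parameter. A hedged sketch of a client call (base URL, token, and identifiers are invented, and the "modified" identifier encoding shown here — ":" standing in for "/" — is an assumption not spelled out in this diff):

# Illustrative only; assumes a reachable backend and a valid bearer token.
import requests

BASE_URL = "http://localhost:7000/v1.0"  # hypothetical
headers = {"Authorization": "Bearer <token>"}

# Move a process model into another group; new_location is a plain group id.
response = requests.put(
    f"{BASE_URL}/process-models/my-group:my-model/move",
    params={"new_location": "other-group"},
    headers=headers,
)
print(response.status_code)  # the route implementation returns 201 with the new model
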
@@ -2,6 +2,7 @@
 from __future__ import annotations
 
 import dataclasses
+import os
 from dataclasses import dataclass
 from dataclasses import field
 from typing import Any
@@ -31,7 +32,7 @@ class ProcessGroup:
 
     def __post_init__(self) -> None:
         """__post_init__."""
-        self.sort_index = self.id
+        self.sort_index = self.display_name
 
     def __eq__(self, other: Any) -> bool:
         """__eq__."""
@@ -47,6 +48,11 @@ class ProcessGroup:
         original_dict = dataclasses.asdict(self)
         return {x: original_dict[x] for x in original_dict if x not in ["sort_index"]}
 
+    # for use with os.path.join, so it can work on windows
+    def id_for_file_path(self) -> str:
+        """Id_for_file_path."""
+        return self.id.replace("/", os.sep)
+
 
 class ProcessGroupSchema(Schema):
     """ProcessGroupSchema."""
@@ -55,13 +61,24 @@ class ProcessGroupSchema(Schema):
         """Meta."""
 
         model = ProcessGroup
-        fields = ["id", "display_name", "display_order", "admin", "process_models"]
+        fields = [
+            "id",
+            "display_name",
+            "display_order",
+            "admin",
+            "process_models",
+            "description",
+            "process_groups",
+        ]
 
     process_models = marshmallow.fields.List(
         marshmallow.fields.Nested(
             "ProcessModelInfoSchema", dump_only=True, required=False
         )
     )
+    process_groups = marshmallow.fields.List(
+        marshmallow.fields.Nested("ProcessGroupSchema", dump_only=True, required=False)
+    )
 
     @post_load
     def make_process_group(
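
The schema change above nests `ProcessGroupSchema` inside itself so a serialized group can carry its sub-groups. A self-contained sketch of that marshmallow pattern, using toy field names rather than the real model:

# Toy illustration of a self-referential marshmallow schema.
import marshmallow
from marshmallow import Schema


class NodeSchema(Schema):
    name = marshmallow.fields.String()
    # Passing the schema by name lets marshmallow resolve the self-reference lazily.
    children = marshmallow.fields.List(
        marshmallow.fields.Nested("NodeSchema", dump_only=True, required=False)
    )


tree = {"name": "root", "children": [{"name": "leaf", "children": []}]}
print(NodeSchema().dump(tree))
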
@@ -71,36 +71,6 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel):
     created_at_in_seconds = db.Column(db.Integer)
     updated_at_in_seconds = db.Column(db.Integer)
 
-    @classmethod
-    def default_report(cls, user: UserModel) -> ProcessInstanceReportModel:
-        """Default_report."""
-        identifier = "default"
-        process_instance_report = ProcessInstanceReportModel.query.filter_by(
-            identifier=identifier, created_by_id=user.id
-        ).first()
-
-        if process_instance_report is None:
-            report_metadata = {
-                "columns": [
-                    {"Header": "id", "accessor": "id"},
-                    {
-                        "Header": "process_model_identifier",
-                        "accessor": "process_model_identifier",
-                    },
-                    {"Header": "start_in_seconds", "accessor": "start_in_seconds"},
-                    {"Header": "end_in_seconds", "accessor": "end_in_seconds"},
-                    {"Header": "status", "accessor": "status"},
-                ],
-            }
-
-            process_instance_report = cls(
-                identifier=identifier,
-                created_by_id=user.id,
-                report_metadata=report_metadata,
-            )
-
-        return process_instance_report  # type: ignore
-
     @classmethod
     def add_fixtures(cls) -> None:
         """Add_fixtures."""
@@ -1,6 +1,5 @@
 """APIs for dealing with process groups, process models, and process instances."""
 import json
-import os
 import random
 import string
 import uuid
@@ -235,6 +234,19 @@ def process_group_show(
     return make_response(jsonify(process_group), 200)
 
 
+def process_group_move(
+    modified_process_group_identifier: str, new_location: str
+) -> flask.wrappers.Response:
+    """process_group_move."""
+    original_process_group_id = un_modify_modified_process_model_id(
+        modified_process_group_identifier
+    )
+    new_process_group = ProcessModelService().process_group_move(
+        original_process_group_id, new_location
+    )
+    return make_response(jsonify(new_process_group), 201)
+
+
 def process_model_create(
     modified_process_group_id: str, body: Dict[str, Union[str, bool, int]]
 ) -> flask.wrappers.Response:
@@ -253,16 +265,11 @@ def process_model_create(
             status_code=400,
         )
 
-    modified_process_model_id = process_model_info.id
-    unmodified_process_model_id = un_modify_modified_process_model_id(
-        modified_process_model_id
+    unmodified_process_group_id = un_modify_modified_process_model_id(
+        modified_process_group_id
     )
-    process_model_info.id = unmodified_process_model_id
-    process_group_id, _ = os.path.split(process_model_info.id)
     process_model_service = ProcessModelService()
-    process_group = process_model_service.get_process_group(
-        un_modify_modified_process_model_id(process_group_id)
-    )
+    process_group = process_model_service.get_process_group(unmodified_process_group_id)
     if process_group is None:
         raise ApiError(
             error_code="process_model_could_not_be_created",
@@ -270,7 +277,7 @@ def process_model_create(
             status_code=400,
         )
 
-    process_model_service.add_spec(process_model_info)
+    process_model_service.add_process_model(process_model_info)
     return Response(
         json.dumps(ProcessModelInfoSchema().dump(process_model_info)),
         status=201,
@@ -307,7 +314,7 @@ def process_model_update(
 
     # process_model_identifier = f"{process_group_id}/{process_model_id}"
     process_model = get_process_model(process_model_identifier)
-    ProcessModelService().update_spec(process_model, body_filtered)
+    ProcessModelService().update_process_model(process_model, body_filtered)
     return ProcessModelInfoSchema().dump(process_model)
 
 
@@ -326,12 +333,28 @@ def process_model_show(modified_process_model_identifier: str) -> Any:
     return process_model_json
 
 
+def process_model_move(
+    modified_process_model_identifier: str, new_location: str
+) -> flask.wrappers.Response:
+    """process_model_move."""
+    original_process_model_id = un_modify_modified_process_model_id(
+        modified_process_model_identifier
+    )
+    new_process_model = ProcessModelService().process_model_move(
+        original_process_model_id, new_location
+    )
+    return make_response(jsonify(new_process_model), 201)
+
+
 def process_model_list(
-    process_group_identifier: Optional[str] = None, page: int = 1, per_page: int = 100
+    process_group_identifier: Optional[str] = None,
+    recursive: Optional[bool] = False,
+    page: int = 1,
+    per_page: int = 100,
 ) -> flask.wrappers.Response:
     """Process model list!"""
     process_models = ProcessModelService().get_process_models(
-        process_group_id=process_group_identifier
+        process_group_id=process_group_identifier, recursive=recursive
     )
     batch = ProcessModelService().get_batch(
         process_models, page=page, per_page=per_page
@@ -736,9 +759,12 @@ def process_instance_list(
     end_to: Optional[int] = None,
     process_status: Optional[str] = None,
     user_filter: Optional[bool] = False,
+    report_identifier: Optional[str] = None,
 ) -> flask.wrappers.Response:
     """Process_instance_list."""
-    process_instance_report = ProcessInstanceReportModel.default_report(g.user)
+    process_instance_report = ProcessInstanceReportService.report_with_identifier(
+        g.user, report_identifier
+    )
 
     if user_filter:
         report_filter = ProcessInstanceReportFilter(
@@ -811,21 +837,16 @@ def process_instance_list(
         ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc()  # type: ignore
     ).paginate(page=page, per_page=per_page, error_out=False)
 
-    # TODO need to look into test failures when the results from result_dict is
-    # used instead of the process instances
-
-    # substitution_variables = request.args.to_dict()
-    # result_dict = process_instance_report.generate_report(
-    #     process_instances.items, substitution_variables
-    # )
-
-    # results = result_dict["results"]
-    # report_metadata = result_dict["report_metadata"]
-
-    results = process_instances.items
+    results = list(
+        map(
+            ProcessInstanceService.serialize_flat_with_task_data,
+            process_instances.items,
+        )
+    )
     report_metadata = process_instance_report.report_metadata
 
     response_json = {
         "report_identifier": process_instance_report.identifier,
         "report_metadata": report_metadata,
         "results": results,
         "filters": report_filter.to_dict(),
@@ -0,0 +1,43 @@
+"""Get_env."""
+from typing import Any
+
+from spiffworkflow_backend.models.group import GroupModel
+from spiffworkflow_backend.models.group import GroupNotFoundError
+from spiffworkflow_backend.models.script_attributes_context import (
+    ScriptAttributesContext,
+)
+from spiffworkflow_backend.models.user import UserModel
+from spiffworkflow_backend.models.user import UserNotFoundError
+from spiffworkflow_backend.scripts.script import Script
+from spiffworkflow_backend.services.user_service import UserService
+
+
+class AddUserToGroup(Script):
+    """AddUserToGroup."""
+
+    def get_description(self) -> str:
+        """Get_description."""
+        return """Add a given user to a given group."""
+
+    def run(
+        self,
+        script_attributes_context: ScriptAttributesContext,
+        *args: Any,
+        **kwargs: Any,
+    ) -> Any:
+        """Run."""
+        username = args[0]
+        group_identifier = args[1]
+        user = UserModel.query.filter_by(username=username).first()
+        if user is None:
+            raise UserNotFoundError(
+                f"Script 'add_user_to_group' could not find a user with username: {username}"
+            )
+
+        group = GroupModel.query.filter_by(identifier=group_identifier).first()
+        if group is None:
+            raise GroupNotFoundError(
+                f"Script 'add_user_to_group' could not find group with identifier '{group_identifier}'."
+            )
+
+        UserService.add_user_to_group(user, group)
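
The new file above is a `Script` plug-in: the engine calls `run` with a context object plus the positional arguments supplied by the BPMN script task. A runnable toy sketch of that shape (everything here is a stand-in; it does not touch the real models or engine):

# Generic sketch of the Script plug-in pattern used above.
from typing import Any


class Script:
    def get_description(self) -> str:
        raise NotImplementedError

    def run(self, context: Any, *args: Any, **kwargs: Any) -> Any:
        raise NotImplementedError


class AddUserToGroupSketch(Script):
    def get_description(self) -> str:
        return "Add a given user to a given group."

    def run(self, context: Any, *args: Any, **kwargs: Any) -> Any:
        # Positional args, exactly as the real script reads them.
        username, group_identifier = args[0], args[1]
        print(f"would add {username} to {group_identifier}")


AddUserToGroupSketch().run(None, "bob", "admins")
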
@@ -1,5 +1,4 @@
 """Acceptance_test_fixtures."""
-import json
 import time
 
 from flask import current_app
@@ -8,13 +7,15 @@ from tests.spiffworkflow_backend.helpers.base_test import BaseTest
 
 from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
 from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
+from spiffworkflow_backend.services.process_instance_service import (
+    ProcessInstanceService,
+)
 
 
 def load_acceptance_test_fixtures() -> list[ProcessInstanceModel]:
     """Load_fixtures."""
     current_app.logger.debug("load_acceptance_test_fixtures() start")
-    test_process_group_id = ""
-    test_process_model_id = "acceptance-tests-group-one/acceptance-tests-model-1"
+    test_process_model_id = "misc/acceptance-tests-group-one/acceptance-tests-model-1"
     user = BaseTest.find_or_create_user()
     statuses = ProcessInstanceStatus.list()
     current_time = round(time.time())
@@ -28,16 +29,13 @@ def load_acceptance_test_fixtures() -> list[ProcessInstanceModel]:
     # suspended - 6 hours ago
     process_instances = []
     for i in range(len(statuses)):
-        process_instance = ProcessInstanceModel(
-            status=statuses[i],
-            process_initiator=user,
-            process_model_identifier=test_process_model_id,
-            process_group_identifier=test_process_group_id,
-            updated_at_in_seconds=round(time.time()),
-            start_in_seconds=current_time - (3600 * i),
-            end_in_seconds=current_time - (3600 * i - 20),
-            bpmn_json=json.dumps({"i": i}),
+        process_instance = ProcessInstanceService.create_process_instance(
+            test_process_model_id, user
         )
+        process_instance.status = statuses[i]
+        process_instance.start_in_seconds = current_time - (3600 * i)
+        process_instance.end_in_seconds = current_time - (3600 * i - 20)
         db.session.add(process_instance)
         process_instances.append(process_instance)
 
@@ -14,7 +14,7 @@ class BackgroundProcessingService:
         """__init__."""
         self.app = app
 
-    def run(self) -> None:
+    def process_waiting_process_instances(self) -> None:
         """Since this runs in a scheduler, we need to specify the app context as well."""
         with self.app.app_context():
             ProcessInstanceService.do_waiting()
@@ -20,8 +20,8 @@ class FileSystemService:
     """ Simple Service meant for extension that provides some useful
     methods for dealing with the File system.
     """
-    CAT_JSON_FILE = "process_group.json"
-    WF_JSON_FILE = "workflow.json"
+    PROCESS_GROUP_JSON_FILE = "process_group.json"
+    PROCESS_MODEL_JSON_FILE = "process_model.json"
 
     @staticmethod
     def root_path() -> str:
@@ -31,10 +31,20 @@ class FileSystemService:
         app_root = current_app.root_path
         return os.path.join(app_root, "..", dir_name)
 
+    @staticmethod
+    def id_string_to_relative_path(id_string: str) -> str:
+        """Id_string_to_relative_path."""
+        return id_string.replace("/", os.sep)
+
     @staticmethod
     def process_group_path(name: str) -> str:
         """Category_path."""
-        return os.path.abspath(os.path.join(FileSystemService.root_path(), name))
+        return os.path.abspath(
+            os.path.join(
+                FileSystemService.root_path(),
+                FileSystemService.id_string_to_relative_path(name),
+            )
+        )
 
     @staticmethod
     def full_path_from_relative_path(relative_path: str) -> str:
@@ -135,7 +145,7 @@ class FileSystemService:
             if item.is_file():
                 if item.name.startswith("."):
                     continue  # Ignore hidden files
-                if item.name == FileSystemService.WF_JSON_FILE:
+                if item.name == FileSystemService.PROCESS_MODEL_JSON_FILE:
                     continue  # Ignore the json files.
                 if file_name is not None and item.name != file_name:
                     continue
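
The helper added above maps slash-delimited ids to OS-specific paths so lookups also work on Windows. A tiny sketch of the round trip, with an illustrative root directory:

# Sketch: process group id -> filesystem path and back (root is made up).
import os


def id_string_to_relative_path(id_string: str) -> str:
    return id_string.replace("/", os.sep)


root = os.path.abspath("process_models")  # hypothetical BPMN spec root
group_id = "misc/acceptance-tests-group-one"
path = os.path.abspath(os.path.join(root, id_string_to_relative_path(group_id)))
# Recover the id from a path relative to the root:
recovered = os.path.relpath(path, start=root).replace(os.sep, "/")
assert recovered == group_id
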
@@ -216,7 +216,9 @@ class DBHandler(logging.Handler):
         bpmn_task_type = record.task_type if hasattr(record, "task_type") else None  # type: ignore
         timestamp = record.created
         message = record.msg if hasattr(record, "msg") else None
-        current_user_id = record.current_user_id if hasattr(record, "current_user_id") else None  # type: ignore
+        current_user_id = (
+            record.current_user_id if hasattr(record, "current_user_id") else None  # type: ignore
+        )
         spiff_step = (
             record.spiff_step  # type: ignore
             if hasattr(record, "spiff_step") and record.spiff_step is not None  # type: ignore
@@ -236,5 +238,5 @@ class DBHandler(logging.Handler):
             "spiff_step": spiff_step,
         }
         )
-        if len(self.logs) % 1000 == 0:
+        if len(self.logs) % 1 == 0:
             self.bulk_insert_logs()
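
Worth noting: the flush threshold above drops from every 1000 buffered records to every record (`len(self.logs) % 1 == 0` is always true), which effectively disables batching. The surrounding code is the usual buffered-insert idiom; a generic runnable sketch of it, with stand-in buffer and flush:

# Generic buffered-flush idiom, as in DBHandler above.
logs: list[dict] = []


def bulk_insert_logs() -> None:  # placeholder for the real bulk insert
    print(f"flushing {len(logs)} records")
    logs.clear()


def emit(record: dict, batch_size: int = 1000) -> None:
    logs.append(record)
    # With batch_size=1, as in the change above, every record flushes immediately.
    if len(logs) % batch_size == 0:
        bulk_insert_logs()
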
@@ -359,21 +359,8 @@ class ProcessInstanceProcessor:
                 subprocesses=subprocesses,
             )
             self.bpmn_process_instance.script_engine = self._script_engine
 
             self.add_user_info_to_process_instance(self.bpmn_process_instance)
 
-            if self.PROCESS_INSTANCE_ID_KEY not in self.bpmn_process_instance.data:
-                if not process_instance_model.id:
-                    db.session.add(process_instance_model)
-                    # If the model is new, and has no id, save it, write it into the process_instance model
-                    # and save it again. In this way, the workflow process is always aware of the
-                    # database model to which it is associated, and scripts running within the model
-                    # can then load data as needed.
-                self.bpmn_process_instance.data[
-                    ProcessInstanceProcessor.PROCESS_INSTANCE_ID_KEY
-                ] = process_instance_model.id
-                self.save()
-
         except MissingSpecError as ke:
             raise ApiError(
                 error_code="unexpected_process_instance_structure",
@@ -675,7 +662,7 @@ class ProcessInstanceProcessor:
         bpmn_process_identifier: str,
     ) -> Optional[str]:
         """Backfill_missing_spec_reference_records."""
-        process_models = ProcessModelService().get_process_models()
+        process_models = ProcessModelService().get_process_models(recursive=True)
         for process_model in process_models:
             try:
                 refs = SpecFileService.reference_map(
@@ -5,6 +5,7 @@ from typing import Optional
 from spiffworkflow_backend.models.process_instance_report import (
     ProcessInstanceReportModel,
 )
+from spiffworkflow_backend.models.user import UserModel
 
 
 @dataclass
@@ -41,6 +42,81 @@ class ProcessInstanceReportFilter:
 class ProcessInstanceReportService:
     """ProcessInstanceReportService."""
 
+    @classmethod
+    def report_with_identifier(
+        cls, user: UserModel, report_identifier: Optional[str] = None
+    ) -> ProcessInstanceReportModel:
+        """Report_with_filter."""
+        if report_identifier is None:
+            report_identifier = "default"
+
+        process_instance_report = ProcessInstanceReportModel.query.filter_by(
+            identifier=report_identifier, created_by_id=user.id
+        ).first()
+
+        if process_instance_report is not None:
+            return process_instance_report  # type: ignore
+
+        # TODO replace with system reports that are loaded on launch (or similar)
+        temp_system_metadata_map = {
+            "default": {
+                "columns": [
+                    {"Header": "id", "accessor": "id"},
+                    {
+                        "Header": "process_model_identifier",
+                        "accessor": "process_model_identifier",
+                    },
+                    {"Header": "start_in_seconds", "accessor": "start_in_seconds"},
+                    {"Header": "end_in_seconds", "accessor": "end_in_seconds"},
+                    {"Header": "status", "accessor": "status"},
+                ],
+            },
+            "system_report_instances_initiated_by_me": {
+                "columns": [
+                    {
+                        "Header": "process_model_identifier",
+                        "accessor": "process_model_identifier",
+                    },
+                    {"Header": "start_in_seconds", "accessor": "start_in_seconds"},
+                    {"Header": "id", "accessor": "id"},
+                    {"Header": "end_in_seconds", "accessor": "end_in_seconds"},
+                    {"Header": "status", "accessor": "status"},
+                ],
+            },
+            "system_report_instances_with_tasks_completed_by_me": {
+                "columns": [
+                    {"Header": "start_in_seconds", "accessor": "start_in_seconds"},
+                    {"Header": "end_in_seconds", "accessor": "end_in_seconds"},
+                    {"Header": "status", "accessor": "status"},
+                    {"Header": "id", "accessor": "id"},
+                    {
+                        "Header": "process_model_identifier",
+                        "accessor": "process_model_identifier",
+                    },
+                ],
+            },
+            "system_report_instances_with_tasks_completed_by_my_groups": {
+                "columns": [
+                    {
+                        "Header": "process_model_identifier",
+                        "accessor": "process_model_identifier",
+                    },
+                    {"Header": "start_in_seconds", "accessor": "start_in_seconds"},
+                    {"Header": "end_in_seconds", "accessor": "end_in_seconds"},
+                    {"Header": "status", "accessor": "status"},
+                    {"Header": "id", "accessor": "id"},
+                ],
+            },
+        }
+
+        process_instance_report = ProcessInstanceReportModel(
+            identifier=report_identifier,
+            created_by_id=user.id,
+            report_metadata=temp_system_metadata_map[report_identifier],
+        )
+
+        return process_instance_report  # type: ignore
+
     @classmethod
     def filter_by_to_dict(
         cls, process_instance_report: ProcessInstanceReportModel
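
The new `report_with_identifier` above is a find-or-build lookup: a report saved for the user wins, otherwise one is built from the hard-coded metadata map (and an unknown identifier raises `KeyError`, since `temp_system_metadata_map[report_identifier]` is unguarded). A stripped-down sketch of that control flow, with no database involved:

# Control-flow sketch of the find-or-build lookup above.
from typing import Optional

SYSTEM_REPORTS = {"default": {"columns": [{"Header": "id", "accessor": "id"}]}}


def report_with_identifier(saved: dict, identifier: Optional[str] = None) -> dict:
    identifier = identifier or "default"
    if identifier in saved:  # a user-saved report wins
        return saved[identifier]
    # Fall back to the built-in map; unknown identifiers raise KeyError, as above.
    return {"identifier": identifier, "report_metadata": SYSTEM_REPORTS[identifier]}


print(report_with_identifier({}, None))
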
@@ -315,3 +315,17 @@ class ProcessInstanceService:
         )
 
         return task
+
+    @staticmethod
+    def serialize_flat_with_task_data(
+        process_instance: ProcessInstanceModel,
+    ) -> dict[str, Any]:
+        """Serialize_flat_with_task_data."""
+        results = {}
+        try:
+            processor = ProcessInstanceProcessor(process_instance)
+            process_instance.data = processor.get_current_data()
+            results = process_instance.serialized_flat
+        except ApiError:
+            results = process_instance.serialized
+        return results
@@ -2,6 +2,7 @@
 import json
 import os
 import shutil
+from glob import glob
 from typing import Any
 from typing import List
 from typing import Optional
@@ -32,22 +33,30 @@ class ProcessModelService(FileSystemService):
     the workflow process_models at once, or manage those file in a git repository. """
 
     GROUP_SCHEMA = ProcessGroupSchema()
-    WF_SCHEMA = ProcessModelInfoSchema()
+    PROCESS_MODEL_SCHEMA = ProcessModelInfoSchema()
 
     def is_group(self, path: str) -> bool:
         """Is_group."""
-        group_json_path = os.path.join(path, self.CAT_JSON_FILE)
+        group_json_path = os.path.join(path, self.PROCESS_GROUP_JSON_FILE)
         if os.path.exists(group_json_path):
             return True
         return False
 
     def is_model(self, path: str) -> bool:
         """Is_model."""
-        model_json_path = os.path.join(path, self.WF_JSON_FILE)
+        model_json_path = os.path.join(path, self.PROCESS_MODEL_JSON_FILE)
         if os.path.exists(model_json_path):
             return True
         return False
 
+    @staticmethod
+    def write_json_file(
+        file_path: str, json_data: dict, indent: int = 4, sort_keys: bool = True
+    ) -> None:
+        """Write json file."""
+        with open(file_path, "w") as h_open:
+            json.dump(json_data, h_open, indent=indent, sort_keys=sort_keys)
+
     @staticmethod
     def get_batch(
         items: list[T],
@@ -59,13 +68,13 @@ class ProcessModelService(FileSystemService):
         end = start + per_page
         return items[start:end]
 
-    def add_spec(self, process_model: ProcessModelInfo) -> None:
+    def add_process_model(self, process_model: ProcessModelInfo) -> None:
         """Add_spec."""
         display_order = self.next_display_order(process_model)
         process_model.display_order = display_order
         self.save_process_model(process_model)
 
-    def update_spec(
+    def update_process_model(
         self, process_model: ProcessModelInfo, attributes_to_update: dict
     ) -> None:
         """Update_spec."""
@@ -76,13 +85,21 @@ class ProcessModelService(FileSystemService):
 
     def save_process_model(self, process_model: ProcessModelInfo) -> None:
         """Save_process_model."""
-        spec_path = os.path.join(FileSystemService.root_path(), process_model.id)
-        os.makedirs(spec_path, exist_ok=True)
-        json_path = os.path.join(spec_path, self.WF_JSON_FILE)
-        with open(json_path, "w") as wf_json:
-            json.dump(
-                self.WF_SCHEMA.dump(process_model), wf_json, indent=4, sort_keys=True
-            )
+        process_model_path = os.path.abspath(
+            os.path.join(FileSystemService.root_path(), process_model.id)
+        )
+        os.makedirs(process_model_path, exist_ok=True)
+        json_path = os.path.abspath(
+            os.path.join(process_model_path, self.PROCESS_MODEL_JSON_FILE)
+        )
+        process_model_id = process_model.id
+        # we don't save id in the json file
+        # this allows us to move models around on the filesystem
+        # the id is determined by its location on the filesystem
+        delattr(process_model, "id")
+        json_data = self.PROCESS_MODEL_SCHEMA.dump(process_model)
+        self.write_json_file(json_path, json_data)
+        process_model.id = process_model_id
 
     def process_model_delete(self, process_model_id: str) -> None:
         """Delete Procecss Model."""
@@ -99,6 +116,22 @@ class ProcessModelService(FileSystemService):
         path = f"{FileSystemService.root_path()}/{process_model_id}"
         shutil.rmtree(path)
 
+    def process_model_move(
+        self, original_process_model_id: str, new_location: str
+    ) -> ProcessModelInfo:
+        """process_model_move."""
+        original_model_path = os.path.abspath(
+            os.path.join(FileSystemService.root_path(), original_process_model_id)
+        )
+        _, model_id = os.path.split(original_model_path)
+        new_relative_path = f"{new_location}/{model_id}"
+        new_model_path = os.path.abspath(
+            os.path.join(FileSystemService.root_path(), new_relative_path)
+        )
+        shutil.move(original_model_path, new_model_path)
+        new_process_model = self.get_process_model(new_relative_path)
+        return new_process_model
+
     @classmethod
     def get_process_model_from_relative_path(
         cls, relative_path: str
@@ -107,7 +140,7 @@ class ProcessModelService(FileSystemService):
         process_group_identifier, _ = os.path.split(relative_path)
         process_group = cls().get_process_group(process_group_identifier)
         path = os.path.join(FileSystemService.root_path(), relative_path)
-        return cls().__scan_spec(path, process_group=process_group)
+        return cls().__scan_process_model(path, process_group=process_group)
 
     def get_process_model(self, process_model_id: str) -> ProcessModelInfo:
         """Get a process model from a model and group id.
@@ -117,7 +150,9 @@ class ProcessModelService(FileSystemService):
         if not os.path.exists(FileSystemService.root_path()):
             raise ProcessEntityNotFoundError("process_model_root_not_found")
 
-        model_path = os.path.join(FileSystemService.root_path(), process_model_id)
+        model_path = os.path.abspath(
+            os.path.join(FileSystemService.root_path(), process_model_id)
+        )
         if self.is_model(model_path):
             process_model = self.get_process_model_from_relative_path(process_model_id)
             return process_model
@@ -140,24 +175,31 @@ class ProcessModelService(FileSystemService):
         #     process_group = self.__scan_process_group(
         #         process_group_dir
         #     )
-        #     return self.__scan_spec(sd.path, sd.name, process_group)
+        #     return self.__scan_process_model(sd.path, sd.name, process_group)
         raise ProcessEntityNotFoundError("process_model_not_found")
 
     def get_process_models(
-        self, process_group_id: Optional[str] = None
+        self, process_group_id: Optional[str] = None, recursive: Optional[bool] = False
     ) -> List[ProcessModelInfo]:
         """Get process models."""
-        process_groups = []
-        if process_group_id is None:
-            process_groups = self.get_process_groups()
-        else:
-            process_group = self.get_process_group(process_group_id)
-            if process_group is not None:
-                process_groups.append(process_group)
-
         process_models = []
-        for process_group in process_groups:
-            process_models.extend(process_group.process_models)
+        root_path = FileSystemService.root_path()
+        if process_group_id:
+            awesome_id = process_group_id.replace("/", os.sep)
+            root_path = os.path.join(root_path, awesome_id)
+
+        process_model_glob = os.path.join(root_path, "*", "process_model.json")
+        if recursive:
+            process_model_glob = os.path.join(root_path, "**", "process_model.json")
+
+        for file in glob(process_model_glob, recursive=True):
+            process_model_relative_path = os.path.relpath(
+                file, start=FileSystemService.root_path()
+            )
+            process_model = self.get_process_model_from_relative_path(
+                os.path.dirname(process_model_relative_path)
+            )
+            process_models.append(process_model)
         process_models.sort()
         return process_models
 
@ -172,8 +214,11 @@ class ProcessModelService(FileSystemService):
|
|||
def get_process_group(self, process_group_id: str) -> ProcessGroup:
|
||||
"""Look for a given process_group, and return it."""
|
||||
if os.path.exists(FileSystemService.root_path()):
|
||||
process_group_path = os.path.join(
|
||||
FileSystemService.root_path(), process_group_id
|
||||
process_group_path = os.path.abspath(
|
||||
os.path.join(
|
||||
FileSystemService.root_path(),
|
||||
FileSystemService.id_string_to_relative_path(process_group_id),
|
||||
)
|
||||
)
|
||||
if self.is_group(process_group_path):
|
||||
return self.__scan_process_group(process_group_path)
|
||||
|
@ -205,16 +250,28 @@ class ProcessModelService(FileSystemService):
|
|||
"""Update_process_group."""
|
||||
cat_path = self.process_group_path(process_group.id)
|
||||
os.makedirs(cat_path, exist_ok=True)
|
||||
json_path = os.path.join(cat_path, self.CAT_JSON_FILE)
|
||||
with open(json_path, "w") as cat_json:
|
||||
json.dump(
|
||||
process_group.serialized,
|
||||
cat_json,
|
||||
indent=4,
|
||||
sort_keys=True,
|
||||
)
|
||||
json_path = os.path.join(cat_path, self.PROCESS_GROUP_JSON_FILE)
|
||||
serialized_process_group = process_group.serialized
|
||||
# we don't store `id` in the json files
|
||||
# this allows us to move groups around on the filesystem
|
||||
del serialized_process_group["id"]
|
||||
self.write_json_file(json_path, serialized_process_group)
|
||||
return process_group
|
||||
|
||||
def process_group_move(
|
||||
self, original_process_group_id: str, new_location: str
|
||||
) -> ProcessGroup:
|
||||
"""process_group_move."""
|
||||
original_group_path = self.process_group_path(original_process_group_id)
|
||||
original_root, original_group_id = os.path.split(original_group_path)
|
||||
new_root = f"{FileSystemService.root_path()}/{new_location}"
|
||||
new_group_path = os.path.abspath(
|
||||
os.path.join(FileSystemService.root_path(), new_root, original_group_id)
|
||||
)
|
||||
destination = shutil.move(original_group_path, new_group_path)
|
||||
new_process_group = self.get_process_group(destination)
|
||||
return new_process_group
|
||||
|
||||
def __get_all_nested_models(self, group_path: str) -> list:
|
||||
"""__get_all_nested_models."""
|
||||
all_nested_models = []
|
||||
|
@ -279,10 +336,13 @@ class ProcessModelService(FileSystemService):
|
|||
|
||||
def __scan_process_group(self, dir_path: str) -> ProcessGroup:
|
||||
"""Reads the process_group.json file, and any nested directories."""
|
||||
cat_path = os.path.join(dir_path, self.CAT_JSON_FILE)
|
||||
cat_path = os.path.join(dir_path, self.PROCESS_GROUP_JSON_FILE)
|
||||
if os.path.exists(cat_path):
|
||||
with open(cat_path) as cat_json:
|
||||
data = json.load(cat_json)
|
||||
# we don't store `id` in the json files, so we add it back in here
|
||||
relative_path = os.path.relpath(dir_path, FileSystemService.root_path())
|
||||
data["id"] = relative_path
|
||||
process_group = ProcessGroup(**data)
|
||||
if process_group is None:
|
||||
raise ApiError(
|
||||
|
@ -292,13 +352,14 @@ class ProcessModelService(FileSystemService):
|
|||
else:
|
||||
process_group_id = dir_path.replace(FileSystemService.root_path(), "")
|
||||
process_group = ProcessGroup(
|
||||
id=process_group_id,
|
||||
id="",
|
||||
display_name=process_group_id,
|
||||
display_order=10000,
|
||||
admin=False,
|
||||
)
|
||||
with open(cat_path, "w") as wf_json:
|
||||
json.dump(self.GROUP_SCHEMA.dump(process_group), wf_json, indent=4)
|
||||
self.write_json_file(cat_path, self.GROUP_SCHEMA.dump(process_group))
|
||||
# we don't store `id` in the json files, so we add it in here
|
||||
process_group.id = process_group_id
|
||||
with os.scandir(dir_path) as nested_items:
|
||||
process_group.process_models = []
|
||||
process_group.process_groups = []
|
||||
|
@ -312,7 +373,7 @@ class ProcessModelService(FileSystemService):
|
|||
)
|
||||
elif self.is_model(nested_item.path):
|
||||
process_group.process_models.append(
|
||||
self.__scan_spec(
|
||||
self.__scan_process_model(
|
||||
nested_item.path,
|
||||
nested_item.name,
|
||||
process_group=process_group,
|
||||
|
@ -322,22 +383,25 @@ class ProcessModelService(FileSystemService):
|
|||
# process_group.process_groups.sort()
|
||||
return process_group
|
||||
|
||||
def __scan_spec(
|
||||
def __scan_process_model(
|
||||
self,
|
||||
path: str,
|
||||
name: Optional[str] = None,
|
||||
process_group: Optional[ProcessGroup] = None,
|
||||
) -> ProcessModelInfo:
|
||||
"""__scan_spec."""
|
||||
spec_path = os.path.join(path, self.WF_JSON_FILE)
|
||||
"""__scan_process_model."""
|
||||
json_file_path = os.path.join(path, self.PROCESS_MODEL_JSON_FILE)
|
||||
|
||||
if os.path.exists(spec_path):
|
||||
with open(spec_path) as wf_json:
|
||||
if os.path.exists(json_file_path):
|
||||
with open(json_file_path) as wf_json:
|
||||
data = json.load(wf_json)
|
||||
if "process_group_id" in data:
|
||||
data.pop("process_group_id")
|
||||
spec = ProcessModelInfo(**data)
|
||||
if spec is None:
|
||||
# we don't save `id` in the json file, so we add it back in here.
|
||||
relative_path = os.path.relpath(path, FileSystemService.root_path())
|
||||
data["id"] = relative_path
|
||||
process_model_info = ProcessModelInfo(**data)
|
||||
if process_model_info is None:
|
||||
raise ApiError(
|
||||
error_code="process_model_could_not_be_loaded_from_disk",
|
||||
message=f"We could not load the process_model from disk with data: {data}",
|
||||
|
@ -349,15 +413,18 @@ class ProcessModelService(FileSystemService):
|
|||
message="Missing name of process model. It should be given",
|
||||
)
|
||||
|
||||
spec = ProcessModelInfo(
|
||||
id=name,
|
||||
process_model_info = ProcessModelInfo(
|
||||
id="",
|
||||
display_name=name,
|
||||
description="",
|
||||
display_order=0,
|
||||
is_review=False,
|
||||
)
|
||||
with open(spec_path, "w") as wf_json:
|
||||
json.dump(self.WF_SCHEMA.dump(spec), wf_json, indent=4)
|
||||
self.write_json_file(
|
||||
json_file_path, self.PROCESS_MODEL_SCHEMA.dump(process_model_info)
|
||||
)
|
||||
# we don't store `id` in the json files, so we add it in here
|
||||
process_model_info.id = name
|
||||
if process_group:
|
||||
spec.process_group = process_group.id
|
||||
return spec
|
||||
process_model_info.process_group = process_group.id
|
||||
return process_model_info
|
||||
|
|
|
@@ -19,15 +19,15 @@ from spiffworkflow_backend.models.secret_model import SecretModel
class SecretService:
    """SecretService."""

-    def encrypt_key(self, plain_key: str) -> str:
-        """Encrypt_key."""
-        # flask_secret = current_app.secret_key
-        # print("encrypt_key")
-        ...
+    # def encrypt_key(self, plain_key: str) -> str:
+    #     """Encrypt_key."""
+    #     # flask_secret = current_app.secret_key
+    #     # print("encrypt_key")
+    #     ...

-    def decrypt_key(self, encrypted_key: str) -> str:
-        """Decrypt key."""
-        ...
+    # def decrypt_key(self, encrypted_key: str) -> str:
+    #     """Decrypt key."""
+    #     ...

    @staticmethod
    def add_secret(

@@ -171,7 +171,7 @@ class SpecFileService(FileSystemService):
            ref.is_primary = True

        if ref.is_primary:
-            ProcessModelService().update_spec(
+            ProcessModelService().update_process_model(
                process_model_info,
                {
                    "primary_process_id": ref.identifier,
@@ -197,7 +197,9 @@ class SpecFileService(FileSystemService):
    @staticmethod
    def full_file_path(spec: ProcessModelInfo, file_name: str) -> str:
        """File_path."""
-        return os.path.join(SpecFileService.workflow_path(spec), file_name)
+        return os.path.abspath(
+            os.path.join(SpecFileService.workflow_path(spec), file_name)
+        )

    @staticmethod
    def last_modified(spec: ProcessModelInfo, file_name: str) -> datetime:

@@ -2,9 +2,7 @@
  "title": "Simple form",
  "description": "A simple form example.",
  "type": "object",
-  "required": [
-    "name"
-  ],
+  "required": ["name"],
  "properties": {
    "name": {
      "type": "string",
@@ -14,11 +12,7 @@
    "department": {
      "type": "string",
      "title": "Department",
-      "enum": [
-        "Finance",
-        "HR",
-        "IT"
-      ]
+      "enum": ["Finance", "HR", "IT"]
    }
  }
}

@@ -1,9 +1,9 @@
{
-    "name": {
+  "name": {
    "ui:title": "Name",
    "ui:description": "(Your name)"
  },
-    "department": {
+  "department": {
    "ui:title": "Department",
    "ui:description": "(Your department)"
  },

@@ -137,7 +137,9 @@ class BaseTest:
        # make sure we have a group
        process_group_id, _ = os.path.split(process_model_id)
        modified_process_group_id = process_group_id.replace("/", ":")
-        process_group_path = f"{FileSystemService.root_path()}/{process_group_id}"
+        process_group_path = os.path.abspath(
+            os.path.join(FileSystemService.root_path(), process_group_id)
+        )
        if ProcessModelService().is_group(process_group_path):

        if exception_notification_addresses is None:

@@ -39,7 +39,7 @@ class ExampleDataLoader:
            is_review=False,
        )
        workflow_spec_service = ProcessModelService()
-        workflow_spec_service.add_spec(spec)
+        workflow_spec_service.add_process_model(spec)

        bpmn_file_name_with_extension = bpmn_file_name
        if not bpmn_file_name_with_extension:

@@ -355,6 +355,41 @@ class TestProcessApi(BaseTest):
        assert response.json["primary_process_id"] == "superduper"
        assert response.json["is_review"] is False

+    def test_process_model_list_all(
+        self,
+        app: Flask,
+        client: FlaskClient,
+        with_db_and_bpmn_file_cleanup: None,
+        with_super_admin_user: UserModel,
+    ) -> None:
+        """Test_process_model_list_all."""
+        group_id = "test_group/test_sub_group"
+        self.create_process_group(client, with_super_admin_user, group_id)
+
+        # add 5 models to the group
+        for i in range(5):
+            process_model_identifier = f"{group_id}/test_model_{i}"
+            model_display_name = f"Test Model {i}"
+            model_description = f"Test Model {i} Description"
+            self.create_process_model_with_api(
+                client,
+                process_model_id=process_model_identifier,
+                process_model_display_name=model_display_name,
+                process_model_description=model_description,
+                user=with_super_admin_user,
+            )
+
+        # get all models
+        response = client.get(
+            "/v1.0/process-models?per_page=1000&recursive=true",
+            headers=self.logged_in_headers(with_super_admin_user),
+        )
+        assert response.json is not None
+        assert len(response.json["results"]) == 5
+        assert response.json["pagination"]["count"] == 5
+        assert response.json["pagination"]["total"] == 5
+        assert response.json["pagination"]["pages"] == 1

    def test_process_model_list(
        self,
        app: Flask,
@@ -1830,7 +1865,7 @@ class TestProcessApi(BaseTest):
        process_model = ProcessModelService().get_process_model(
            process_model_identifier
        )
-        ProcessModelService().update_spec(
+        ProcessModelService().update_process_model(
            process_model,
            {"fault_or_suspend_on_exception": NotificationType.suspend.value},
        )
@@ -1885,7 +1920,7 @@ class TestProcessApi(BaseTest):
        process_model = ProcessModelService().get_process_model(
            process_model_identifier
        )
-        ProcessModelService().update_spec(
+        ProcessModelService().update_process_model(
            process_model,
            {"exception_notification_addresses": ["with_super_admin_user@example.com"]},
        )
@@ -2369,3 +2404,115 @@ class TestProcessApi(BaseTest):
        )

        print("test_script_unit_test_run")

+    def setup_initial_groups_for_move_tests(
+        self, client: FlaskClient, with_super_admin_user: UserModel
+    ) -> None:
+        """setup_initial_groups_for_move_tests."""
+        groups = ["group_a", "group_b", "group_b/group_bb"]
+        # setup initial groups
+        for group in groups:
+            self.create_process_group(
+                client, with_super_admin_user, group, display_name=group
+            )
+        # make sure initial groups exist
+        for group in groups:
+            persisted = ProcessModelService().get_process_group(group)
+            assert persisted is not None
+            assert persisted.id == group
+
+    def test_move_model(
+        self,
+        app: Flask,
+        client: FlaskClient,
+        with_db_and_bpmn_file_cleanup: None,
+        with_super_admin_user: UserModel,
+    ) -> None:
+        """test_move_model."""
+        self.setup_initial_groups_for_move_tests(client, with_super_admin_user)
+
+        process_model_id = "test_model"
+        original_location = "group_a"
+        original_process_model_path = f"{original_location}/{process_model_id}"
+
+        # add model to `group_a`
+        self.create_process_model_with_api(
+            client,
+            original_process_model_path,
+            user=with_super_admin_user,
+            process_model_display_name=process_model_id,
+            process_model_description=process_model_id,
+        )
+        persisted = ProcessModelService().get_process_model(original_process_model_path)
+        assert persisted is not None
+        assert persisted.id == original_process_model_path
+
+        # move model to `group_b/group_bb`
+        new_location = "group_b/group_bb"
+        new_process_model_path = f"{new_location}/{process_model_id}"
+        modified_original_process_model_id = original_process_model_path.replace(
+            "/", ":"
+        )
+
+        response = client.put(
+            f"/v1.0/process-models/{modified_original_process_model_id}/move?new_location={new_location}",
+            headers=self.logged_in_headers(with_super_admin_user),
+        )
+        assert response.status_code == 201
+        assert response.json["id"] == new_process_model_path
+
+        # make sure the original model does not exist
+        with pytest.raises(ProcessEntityNotFoundError) as e:
+            ProcessModelService().get_process_model(original_process_model_path)
+        assert e.value.args[0] == "process_model_not_found"
+
+        # make sure the new model does exist
+        new_process_model = ProcessModelService().get_process_model(
+            new_process_model_path
+        )
+        assert new_process_model is not None
+        assert new_process_model.id == new_process_model_path
+
+    def test_move_group(
+        self,
+        app: Flask,
+        client: FlaskClient,
+        with_db_and_bpmn_file_cleanup: None,
+        with_super_admin_user: UserModel,
+    ) -> None:
+        """test_move_group."""
+        self.setup_initial_groups_for_move_tests(client, with_super_admin_user)
+
+        # add sub group to `group_a`
+        sub_group_id = "sub_group"
+        original_location = "group_a"
+        original_sub_path = f"{original_location}/{sub_group_id}"
+        self.create_process_group(
+            client, with_super_admin_user, original_sub_path, display_name=sub_group_id
+        )
+        # make sure original subgroup exists
+        persisted = ProcessModelService().get_process_group(original_sub_path)
+        assert persisted is not None
+        assert persisted.id == original_sub_path
+
+        # move sub_group to `group_b/group_bb`
+        new_location = "group_b/group_bb"
+        new_sub_path = f"{new_location}/{sub_group_id}"
+        modified_original_process_group_id = original_sub_path.replace("/", ":")
+        response = client.put(
+            f"/v1.0/process-groups/{modified_original_process_group_id}/move?new_location={new_location}",
+            headers=self.logged_in_headers(with_super_admin_user),
+        )
+        assert response.status_code == 201
+        assert response.json["id"] == new_sub_path
+
+        # make sure the original subgroup does not exist
+        with pytest.raises(ProcessEntityNotFoundError) as e:
+            ProcessModelService().get_process_group(original_sub_path)
+
+        assert e.value.args[0] == "process_group_not_found"
+        assert e.value.args[1] == f"Process Group Id: {original_sub_path}"
+
+        # make sure the new subgroup does exist
+        new_process_group = ProcessModelService().get_process_group(new_sub_path)
+        assert new_process_group.id == new_sub_path

@@ -32,7 +32,9 @@ class TestProcessModelService(BaseTest):
        primary_process_id = process_model.primary_process_id
        assert primary_process_id == "Process_HelloWorld"

-        ProcessModelService().update_spec(process_model, {"display_name": "new_name"})
+        ProcessModelService().update_process_model(
+            process_model, {"display_name": "new_name"}
+        )

        assert process_model.display_name == "new_name"
        assert process_model.primary_process_id == primary_process_id

@@ -32,12 +32,15 @@ describe('process-groups', () => {

    cy.contains('Delete').click();
    cy.contains('Are you sure');
-    cy.getBySel('modal-confirmation-dialog').find('.cds--btn--danger').click();
+    cy.getBySel('delete-process-group-button-modal-confirmation-dialog')
+      .find('.cds--btn--danger')
+      .click();
    cy.url().should('include', `process-groups`);
    cy.contains(groupId).should('not.exist');
  });

-  it('can paginate items', () => {
-    cy.basicPaginationTest();
-  });
+  // process groups no longer has pagination post-tiles
+  // it('can paginate items', () => {
+  //   cy.basicPaginationTest();
+  // });
});

@@ -3,9 +3,9 @@ import { DATE_FORMAT, PROCESS_STATUSES } from '../../src/config';

const filterByDate = (fromDate) => {
  cy.get('#date-picker-start-from').clear().type(format(fromDate, DATE_FORMAT));
+  cy.contains('Start date from').click();
+  cy.contains('Start date to').click();
  cy.get('#date-picker-end-from').clear().type(format(fromDate, DATE_FORMAT));
+  cy.contains('End date from').click();
+  cy.contains('End date to').click();
  cy.getBySel('filter-button').click();
};

@@ -53,9 +53,9 @@ const updateBpmnPythonScriptWithMonaco = (
  cy.get('.monaco-editor textarea:first')
    .click()
    .focused() // change subject to currently focused element
    // .type('{ctrl}a') // had been doing it this way, but it turns out to be flaky relative to clear()
    .clear()
-    .type(pythonScript, { delay: 30 });
+    // long delay to ensure cypress isn't competing with monaco auto complete stuff
+    .type(pythonScript, { delay: 120 });

  cy.contains('Close').click();
  // wait for a little bit for the xml to get set before saving

@@ -119,28 +119,28 @@ describe('process-instances', () => {
    cy.runPrimaryBpmnFile();
  });

-  it('can create a new instance and can modify with monaco text editor', () => {
-    // leave off the ending double quote since monaco adds it
-    const originalPythonScript = 'person = "Kevin';
-    const newPythonScript = 'person = "Mike';
-
-    const bpmnFile = 'process_model_one.bpmn';
-
-    // Change bpmn
-    cy.getBySel('files-accordion').click();
-    cy.getBySel(`edit-file-${bpmnFile.replace('.', '-')}`).click();
-    cy.contains(`Process Model File: ${bpmnFile}`);
-    updateBpmnPythonScriptWithMonaco(newPythonScript);
-    cy.contains('acceptance-tests-model-1').click();
-    cy.runPrimaryBpmnFile();
-
-    cy.getBySel('files-accordion').click();
-    cy.getBySel(`edit-file-${bpmnFile.replace('.', '-')}`).click();
-    cy.contains(`Process Model File: ${bpmnFile}`);
-    updateBpmnPythonScriptWithMonaco(originalPythonScript);
-    cy.contains('acceptance-tests-model-1').click();
-    cy.runPrimaryBpmnFile();
-  });
+  // it('can create a new instance and can modify with monaco text editor', () => {
+  //   // leave off the ending double quote since monaco adds it
+  //   const originalPythonScript = 'person = "Kevin';
+  //   const newPythonScript = 'person = "Mike';
+  //
+  //   const bpmnFile = 'process_model_one.bpmn';
+  //
+  //   // Change bpmn
+  //   cy.getBySel('files-accordion').click();
+  //   cy.getBySel(`edit-file-${bpmnFile.replace('.', '-')}`).click();
+  //   cy.contains(`Process Model File: ${bpmnFile}`);
+  //   updateBpmnPythonScriptWithMonaco(newPythonScript);
+  //   cy.contains('acceptance-tests-model-1').click();
+  //   cy.runPrimaryBpmnFile();
+  //
+  //   cy.getBySel('files-accordion').click();
+  //   cy.getBySel(`edit-file-${bpmnFile.replace('.', '-')}`).click();
+  //   cy.contains(`Process Model File: ${bpmnFile}`);
+  //   updateBpmnPythonScriptWithMonaco(originalPythonScript);
+  //   cy.contains('acceptance-tests-model-1').click();
+  //   cy.runPrimaryBpmnFile();
+  // });

  it('can paginate items', () => {
    // make sure we have some process instances

@@ -174,13 +174,12 @@ describe('process-instances', () => {
      if (!['all', 'waiting'].includes(processStatus)) {
        cy.get(statusSelect).click();
        cy.get(statusSelect).contains(processStatus).click();
+        // close the dropdown again
+        cy.get(statusSelect).click();
        cy.getBySel('filter-button').click();
-        // FIXME: wait a little bit for the useEffects to be able to fully set processInstanceFilters
-        cy.wait(1000);
        cy.url().should('include', `status=${processStatus}`);
        cy.assertAtLeastOneItemInPaginatedResults();
-        cy.getBySel(`process-instance-status-${processStatus}`).contains(
-          processStatus
-        );
+        cy.getBySel(`process-instance-status-${processStatus}`);
+        // there should really only be one, but in CI there are sometimes more
        cy.get('div[aria-label="Clear all selected items"]:first').click();
      }

@@ -1,3 +1,5 @@
+import { modifyProcessModelPath } from '../../src/helpers';
+
describe('process-models', () => {
  beforeEach(() => {
    cy.login();
@@ -9,14 +11,19 @@ describe('process-models', () => {
  it('can perform crud operations', () => {
    const uuid = () => Cypress._.random(0, 1e6);
    const id = uuid();
-    const groupId = 'acceptance-tests-group-one';
+    const groupId = 'misc/acceptance-tests-group-one';
    const groupDisplayName = 'Acceptance Tests Group One';
    const modelDisplayName = `Test Model 2 ${id}`;
-    const newModelDisplayName = `${modelDisplayName} edited`;
    const modelId = `test-model-2-${id}`;
+    const newModelDisplayName = `${modelDisplayName} edited`;
+    cy.contains('Misc').click();
+    cy.wait(500);
    cy.contains(groupDisplayName).click();
    cy.createModel(groupId, modelId, modelDisplayName);
-    cy.url().should('include', `process-models/${groupId}:${modelId}`);
+    cy.url().should(
+      'include',
+      `process-models/${modifyProcessModelPath(groupId)}:${modelId}`
+    );
    cy.contains(`Process Model: ${modelDisplayName}`);

    cy.contains('Edit process model').click();
@@ -29,17 +36,26 @@ describe('process-models', () => {
      newModelDisplayName
    );

-    cy.contains('Delete').click();
+    // go back to process model show by clicking on the breadcrumb
+    cy.contains(modelId).click();
+
+    cy.getBySel('delete-process-model-button').click();
    cy.contains('Are you sure');
-    cy.getBySel('modal-confirmation-dialog').find('.cds--btn--danger').click();
-    cy.url().should('include', `process-groups/${groupId}`);
+    cy.getBySel('delete-process-model-button-modal-confirmation-dialog')
+      .find('.cds--btn--danger')
+      .click();
+    cy.url().should(
+      'include',
+      `process-groups/${modifyProcessModelPath(groupId)}`
+    );
    cy.contains(modelId).should('not.exist');
  });

  it('can create new bpmn, dmn, and json files', () => {
    const uuid = () => Cypress._.random(0, 1e6);
    const id = uuid();
-    const groupId = 'acceptance-tests-group-one';
+    const directParentGroupId = 'acceptance-tests-group-one';
+    const groupId = `misc/${directParentGroupId}`;
    const groupDisplayName = 'Acceptance Tests Group One';
    const modelDisplayName = `Test Model 2 ${id}`;
    const modelId = `test-model-2-${id}`;
@@ -48,13 +64,17 @@ describe('process-models', () => {
    const dmnFileName = `dmn_test_file_${id}`;
    const jsonFileName = `json_test_file_${id}`;

+    cy.contains('Misc').click();
+    cy.wait(500);
    cy.contains(groupDisplayName).click();
    cy.createModel(groupId, modelId, modelDisplayName);
-    cy.contains(groupId).click();
+    cy.contains(directParentGroupId).click();
    cy.contains(modelId).click();
-    cy.url().should('include', `process-models/${groupId}:${modelId}`);
+    cy.url().should(
+      'include',
+      `process-models/${modifyProcessModelPath(groupId)}:${modelId}`
+    );
    cy.contains(`Process Model: ${modelDisplayName}`);
    cy.getBySel('files-accordion').click();
    cy.contains(`${bpmnFileName}.bpmn`).should('not.exist');
    cy.contains(`${dmnFileName}.dmn`).should('not.exist');
    cy.contains(`${jsonFileName}.json`).should('not.exist');
@@ -73,7 +93,7 @@ describe('process-models', () => {
    cy.contains(`Process Model File: ${bpmnFileName}`);
    cy.contains(modelId).click();
    cy.contains(`Process Model: ${modelDisplayName}`);
-    cy.getBySel('files-accordion').click();
+    // cy.getBySel('files-accordion').click();
    cy.contains(`${bpmnFileName}.bpmn`).should('exist');

    // add new dmn file
@@ -81,13 +101,17 @@ describe('process-models', () => {
    cy.contains(/^Process Model File$/);
    cy.get('g[data-element-id=decision_1]').click().should('exist');
+    cy.contains('General').click();
+    cy.get('#bio-properties-panel-id')
+      .clear()
+      .type('decision_acceptance_test_1');
+    cy.contains('General').click();
    cy.contains('Save').click();
    cy.get('input[name=file_name]').type(dmnFileName);
    cy.contains('Save Changes').click();
    cy.contains(`Process Model File: ${dmnFileName}`);
    cy.contains(modelId).click();
    cy.contains(`Process Model: ${modelDisplayName}`);
-    cy.getBySel('files-accordion').click();
+    // cy.getBySel('files-accordion').click();
    cy.contains(`${dmnFileName}.dmn`).should('exist');

    // add new json file
@@ -103,35 +127,41 @@ describe('process-models', () => {
    cy.wait(500);
    cy.contains(modelId).click();
    cy.contains(`Process Model: ${modelDisplayName}`);
-    cy.getBySel('files-accordion').click();
+    // cy.getBySel('files-accordion').click();
    cy.contains(`${jsonFileName}.json`).should('exist');

    cy.contains('Edit process model').click();
-    cy.contains('Delete').click();
+    cy.getBySel('delete-process-model-button').click();
    cy.contains('Are you sure');
-    cy.getBySel('modal-confirmation-dialog').find('.cds--btn--danger').click();
-    cy.url().should('include', `process-groups/${groupId}`);
+    cy.getBySel('delete-process-model-button-modal-confirmation-dialog')
+      .find('.cds--btn--danger')
+      .click();
+    cy.url().should('include', `process-groups/${modifyProcessModelPath(groupId)}`);
    cy.contains(modelId).should('not.exist');
  });

  it('can upload and run a bpmn file', () => {
    const uuid = () => Cypress._.random(0, 1e6);
    const id = uuid();
-    const groupId = 'acceptance-tests-group-one';
+    const directParentGroupId = 'acceptance-tests-group-one';
+    const groupId = `misc/${directParentGroupId}`;
    const groupDisplayName = 'Acceptance Tests Group One';
    const modelDisplayName = `Test Model 2 ${id}`;
    const modelId = `test-model-2-${id}`;
    cy.contains('Add a process group');
+    cy.contains('Misc').click();
+    cy.wait(500);
    cy.contains(groupDisplayName).click();
    cy.createModel(groupId, modelId, modelDisplayName);

-    cy.contains(`${groupId}`).click();
+    cy.contains(`${directParentGroupId}`).click();
    cy.contains('Add a process model');
    cy.contains(modelId).click();
-    cy.url().should('include', `process-models/${groupId}:${modelId}`);
+    cy.url().should(
+      'include',
+      `process-models/${modifyProcessModelPath(groupId)}:${modelId}`
+    );
    cy.contains(`Process Model: ${modelDisplayName}`);

    cy.getBySel('files-accordion').click();
    cy.getBySel('upload-file-button').click();
    cy.contains('Add file').selectFile(
      'cypress/fixtures/test_bpmn_file_upload.bpmn'
@@ -142,24 +172,32 @@ describe('process-models', () => {
      .click();
    cy.runPrimaryBpmnFile();

-    cy.getBySel('process-instance-list-link').click();
+    // cy.getBySel('process-instance-list-link').click();
    cy.getBySel('process-instance-show-link').click();
    cy.getBySel('process-instance-delete').click();
    cy.contains('Are you sure');
-    cy.getBySel('modal-confirmation-dialog').find('.cds--btn--danger').click();
+    cy.getBySel('process-instance-delete-modal-confirmation-dialog')
+      .find('.cds--btn--danger')
+      .click();

    // in breadcrumb
    cy.contains(modelId).click();

    cy.contains('Edit process model').click();
-    cy.contains('Delete').click();
+    cy.getBySel('delete-process-model-button').click();
    cy.contains('Are you sure');
-    cy.getBySel('modal-confirmation-dialog').find('.cds--btn--danger').click();
-    cy.url().should('include', `process-groups/${groupId}`);
+    cy.getBySel('delete-process-model-button-modal-confirmation-dialog')
+      .find('.cds--btn--danger')
+      .click();
+    cy.url().should(
+      'include',
+      `process-groups/${modifyProcessModelPath(groupId)}`
+    );
    cy.contains(modelId).should('not.exist');
  });

  it('can paginate items', () => {
+    cy.contains('Misc').click();
+    cy.wait(500);
+    cy.contains('Acceptance Tests Group One').click();
    cy.basicPaginationTest();
  });
@@ -167,6 +205,6 @@ describe('process-models', () => {
  it('can allow searching for model', () => {
    cy.getBySel('process-model-selection').click().type('model-3');
    cy.contains('acceptance-tests-group-one/acceptance-tests-model-3').click();
-    cy.contains('List').click();
+    cy.contains('Acceptance Tests Model 3');
  });
});

@@ -1,18 +1,27 @@
const submitInputIntoFormField = (taskName, fieldKey, fieldValue) => {
-  cy.contains(`Task: ${taskName}`);
+  cy.contains(`Task: ${taskName}`, { timeout: 10000 });
  cy.get(fieldKey).clear().type(fieldValue);
  cy.contains('Submit').click();
};

const checkFormFieldIsReadOnly = (formName, fieldKey) => {
  cy.contains(`Task: ${formName}`);
-  cy.get(fieldKey).invoke('attr', 'readonly').should('exist');
+  cy.get(fieldKey).invoke('attr', 'disabled').should('exist');
};

const checkTaskHasClass = (taskName, className) => {
  cy.get(`g[data-element-id=${taskName}]`).should('have.class', className);
};

+const kickOffModelWithForm = (modelId, formName) => {
+  cy.navigateToProcessModel(
+    'Acceptance Tests Group One',
+    'Acceptance Tests Model 2',
+    'acceptance-tests-model-2'
+  );
+  cy.runPrimaryBpmnFile(true);
+};
+
describe('tasks', () => {
  beforeEach(() => {
    cy.login();
@@ -21,7 +30,6 @@ describe('tasks', () => {
    cy.logout();
  });

-  // TODO: need to fix the next_task thing to make this pass
  it('can complete and navigate a form', () => {
    const groupDisplayName = 'Acceptance Tests Group One';
    const modelId = `acceptance-tests-model-2`;
@@ -30,11 +38,7 @@ describe('tasks', () => {
    const activeTaskClassName = 'active-task-highlight';

    cy.navigateToProcessModel(groupDisplayName, modelDisplayName, modelId);
-
-    // avoid reloading so we can click on the task link that appears on running the process instance
-    cy.runPrimaryBpmnFile(false);
-
-    cy.contains('my task').click();
+    cy.runPrimaryBpmnFile(true);

    submitInputIntoFormField(
      'get_user_generated_number_one',
@@ -59,7 +63,6 @@ describe('tasks', () => {
      '#root_user_generated_number_1'
    );

-    cy.getBySel('form-nav-form3').should('have.text', 'form3 - Current');
    cy.getBySel('form-nav-form3').click();
    submitInputIntoFormField(
      'get_user_generated_number_three',
@@ -111,18 +114,12 @@ describe('tasks', () => {
  });

  it('can paginate items', () => {
-    cy.navigateToProcessModel(
-      'Acceptance Tests Group One',
-      'Acceptance Tests Model 2',
-      'acceptance-tests-model-2'
-    );
-
    // make sure we have some tasks
-    cy.runPrimaryBpmnFile();
-    cy.runPrimaryBpmnFile();
-    cy.runPrimaryBpmnFile();
-    cy.runPrimaryBpmnFile();
-    cy.runPrimaryBpmnFile();
+    kickOffModelWithForm();
+    kickOffModelWithForm();
+    kickOffModelWithForm();
+    kickOffModelWithForm();
+    kickOffModelWithForm();

    cy.navigateToHome();
    cy.basicPaginationTest();

@@ -1,4 +1,5 @@
import { string } from 'prop-types';
+import { modifyProcessModelPath } from '../../src/helpers';

// ***********************************************
// This example commands.js shows you how to
@@ -31,9 +32,8 @@ Cypress.Commands.add('getBySel', (selector, ...args) => {
});

Cypress.Commands.add('navigateToHome', () => {
-  cy.get('button[aria-label="Open menu"]').click();
+  cy.getBySel('header-menu-expand-button').click();
  cy.getBySel('side-nav-items').contains('Home').click();
-  // cy.getBySel('nav-home').click();
});

Cypress.Commands.add('navigateToAdmin', () => {
@@ -76,23 +76,36 @@ Cypress.Commands.add('createModel', (groupId, modelId, modelDisplayName) => {
  cy.get('input[name=id]').should('have.value', modelId);
  cy.contains('Submit').click();

-  cy.url().should('include', `process-models/${groupId}:${modelId}`);
+  cy.url().should(
+    'include',
+    `process-models/${modifyProcessModelPath(groupId)}:${modelId}`
+    // `process-models/${groupId}:${modelId}`
+  );
  cy.contains(`Process Model: ${modelDisplayName}`);
});

-Cypress.Commands.add('runPrimaryBpmnFile', (reload = true) => {
-  cy.contains('Run').click();
-  cy.contains(/Process Instance.*kicked off/);
-  if (reload) {
-    cy.reload(true);
-    cy.contains(/Process Instance.*kicked off/).should('not.exist');
-  }
-});
+Cypress.Commands.add(
+  'runPrimaryBpmnFile',
+  (expectAutoRedirectToHumanTask = false) => {
+    cy.contains('Run').click();
+    if (expectAutoRedirectToHumanTask) {
+      // the url changes immediately, so also make sure we get some content from the next page, "Task:", or else when we try to interact with the page, it'll re-render and we'll get an error with cypress.
+      cy.url().should('include', `/tasks/`);
+      cy.contains('Task: ');
+    } else {
+      cy.contains(/Process Instance.*kicked off/);
+      cy.reload(true);
+      cy.contains(/Process Instance.*kicked off/).should('not.exist');
+    }
+  }
+);

Cypress.Commands.add(
  'navigateToProcessModel',
  (groupDisplayName, modelDisplayName, modelIdentifier) => {
    cy.navigateToAdmin();
+    cy.contains('Misc').click();
+    cy.contains(`Process Group: 99-Misc`, { timeout: 10000 });
    cy.contains(groupDisplayName).click();
    cy.contains(`Process Group: ${groupDisplayName}`);
    // https://stackoverflow.com/q/51254946/6090676
@@ -125,8 +138,3 @@ Cypress.Commands.add('modifyProcessModelPath', (path) => {
  path.replace('/', ':');
  return path;
});

-Cypress.Commands.add('modifyProcessModelPath', (path) => {
-  path.replace('/', ':');
-  return path;
-});

@@ -24,7 +24,6 @@
        "@rjsf/mui": "^5.0.0-beta.13",
        "@rjsf/utils": "^5.0.0-beta.13",
        "@rjsf/validator-ajv6": "^5.0.0-beta.13",
-        "@rjsf/validator-ajv8": "^5.0.0-beta.13",
        "@tanstack/react-table": "^8.2.2",
        "@testing-library/jest-dom": "^5.16.4",
        "@testing-library/react": "^13.3.0",
@@ -4950,23 +4949,6 @@
        "@rjsf/utils": "^5.0.0-beta.1"
      }
    },
-    "node_modules/@rjsf/validator-ajv8": {
-      "version": "5.0.0-beta.13",
-      "resolved": "https://registry.npmjs.org/@rjsf/validator-ajv8/-/validator-ajv8-5.0.0-beta.13.tgz",
-      "integrity": "sha512-/hrYbiwgCvEqw1Z7YZTWvd+ZAiX5vSN0WAI2hJTJTqKuCTcIH0fqNDCaOg3FBR38BL7seZrUmibIUcPU66iJ1w==",
-      "dependencies": {
-        "ajv-formats": "^2.1.1",
-        "ajv8": "npm:ajv@^8.11.0",
-        "lodash": "^4.17.15",
-        "lodash-es": "^4.17.15"
-      },
-      "engines": {
-        "node": ">=14"
-      },
-      "peerDependencies": {
-        "@rjsf/utils": "^5.0.0-beta.12"
-      }
-    },
    "node_modules/@rollup/plugin-babel": {
      "version": "5.3.1",
      "resolved": "https://registry.npmjs.org/@rollup/plugin-babel/-/plugin-babel-5.3.1.tgz",
@@ -6840,27 +6822,6 @@
        "ajv": "^6.9.1"
      }
    },
-    "node_modules/ajv8": {
-      "name": "ajv",
-      "version": "8.11.2",
-      "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.2.tgz",
-      "integrity": "sha512-E4bfmKAhGiSTvMfL1Myyycaub+cUEU2/IvpylXkUu7CHBkBj1f/ikdzbD7YQ6FKUbixDxeYvB/xY4fvyroDlQg==",
-      "dependencies": {
-        "fast-deep-equal": "^3.1.1",
-        "json-schema-traverse": "^1.0.0",
-        "require-from-string": "^2.0.2",
-        "uri-js": "^4.2.2"
-      },
-      "funding": {
-        "type": "github",
-        "url": "https://github.com/sponsors/epoberezkin"
-      }
-    },
-    "node_modules/ajv8/node_modules/json-schema-traverse": {
-      "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz",
-      "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug=="
-    },
    "node_modules/ansi-align": {
      "version": "3.0.1",
      "resolved": "https://registry.npmjs.org/ansi-align/-/ansi-align-3.0.1.tgz",
@@ -19562,9 +19523,9 @@
      }
    },
    "node_modules/loader-utils": {
-      "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.2.tgz",
-      "integrity": "sha512-TM57VeHptv569d/GKh6TAYdzKblwDNiumOdkFnejjD0XwTH87K90w3O7AiJRqdQoXygvi1VQTJTLGhJl7WqA7A==",
+      "version": "2.0.4",
+      "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.4.tgz",
+      "integrity": "sha512-xXqpXoINfFhgua9xiqD8fPFHgkoq1mmmpE92WlDbm9rNRd/EbRb+Gqf908T2DMfuHjjJlksiK2RbHVOdD/MqSw==",
      "dependencies": {
        "big.js": "^5.2.2",
        "emojis-list": "^3.0.0",
@@ -23934,9 +23895,9 @@
      }
    },
    "node_modules/react-dev-utils/node_modules/loader-utils": {
-      "version": "3.2.0",
-      "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-3.2.0.tgz",
-      "integrity": "sha512-HVl9ZqccQihZ7JM85dco1MvO9G+ONvxoGa9rkhzFsneGLKSUg1gJf9bWzhRhcvm2qChhWpebQhP44qxjKIUCaQ==",
+      "version": "3.2.1",
+      "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-3.2.1.tgz",
+      "integrity": "sha512-ZvFw1KWS3GVyYBYb7qkmRM/WwL2TQQBxgCK62rlvm4WpVQ23Nb4tYjApUlfjrEGvOs7KHEsmyUn75OHZrJMWPw==",
      "engines": {
        "node": ">= 12.13.0"
      }
@@ -25031,25 +24992,14 @@
      }
    },
    "node_modules/recursive-readdir": {
-      "version": "2.2.2",
-      "resolved": "https://registry.npmjs.org/recursive-readdir/-/recursive-readdir-2.2.2.tgz",
-      "integrity": "sha512-nRCcW9Sj7NuZwa2XvH9co8NPeXUBhZP7CRKJtU+cS6PW9FpCIFoI5ib0NT1ZrbNuPoRy0ylyCaUL8Gih4LSyFg==",
+      "version": "2.2.3",
+      "resolved": "https://registry.npmjs.org/recursive-readdir/-/recursive-readdir-2.2.3.tgz",
+      "integrity": "sha512-8HrF5ZsXk5FAH9dgsx3BlUer73nIhuj+9OrQwEbLTPOBzGkL1lsFCR01am+v+0m2Cmbs1nP12hLDl5FA7EszKA==",
      "dependencies": {
-        "minimatch": "3.0.4"
+        "minimatch": "^3.0.5"
      },
      "engines": {
        "node": ">=0.10.0"
      }
    },
    "node_modules/recursive-readdir/node_modules/minimatch": {
      "version": "3.0.4",
      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
      "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
      "dependencies": {
        "brace-expansion": "^1.1.7"
      },
      "engines": {
-        "node": "*"
+        "node": ">=6.0.0"
      }
    },
    "node_modules/redent": {
@@ -26103,9 +26053,9 @@
      }
    },
    "node_modules/serve": {
-      "version": "14.0.1",
-      "resolved": "https://registry.npmjs.org/serve/-/serve-14.0.1.tgz",
-      "integrity": "sha512-tNGwxl27FwA8TbmMQqN0jTaSx8/trL532qZsJHX1VdiEIjjtMJHCs7AFS6OvtC7cTHOvmjXqt5yczejU6CV2Xg==",
+      "version": "14.1.1",
+      "resolved": "https://registry.npmjs.org/serve/-/serve-14.1.1.tgz",
+      "integrity": "sha512-7RhRDEirZ7Qyee4QWhBHO9qRtjIGsIPGecDDPzNzlOsjDiZWcq36GS8FioVJAuJPVJBBDTsGp33WWOO4B9A82g==",
      "dependencies": {
        "@zeit/schemas": "2.21.0",
        "ajv": "8.11.0",
@@ -26116,7 +26066,7 @@
        "clipboardy": "3.0.0",
        "compression": "1.7.4",
        "is-port-reachable": "4.0.0",
-        "serve-handler": "6.1.3",
+        "serve-handler": "6.1.5",
        "update-check": "1.5.4"
      },
      "bin": {
@@ -26127,15 +26077,15 @@
      }
    },
    "node_modules/serve-handler": {
-      "version": "6.1.3",
-      "resolved": "https://registry.npmjs.org/serve-handler/-/serve-handler-6.1.3.tgz",
-      "integrity": "sha512-FosMqFBNrLyeiIDvP1zgO6YoTzFYHxLDEIavhlmQ+knB2Z7l1t+kGLHkZIDN7UVWqQAmKI3D20A6F6jo3nDd4w==",
+      "version": "6.1.5",
+      "resolved": "https://registry.npmjs.org/serve-handler/-/serve-handler-6.1.5.tgz",
+      "integrity": "sha512-ijPFle6Hwe8zfmBxJdE+5fta53fdIY0lHISJvuikXB3VYFafRjMRpOffSPvCYsbKyBA7pvy9oYr/BT1O3EArlg==",
      "dependencies": {
        "bytes": "3.0.0",
        "content-disposition": "0.5.2",
        "fast-url-parser": "1.1.3",
        "mime-types": "2.1.18",
-        "minimatch": "3.0.4",
+        "minimatch": "3.1.2",
        "path-is-inside": "1.0.2",
        "path-to-regexp": "2.2.1",
        "range-parser": "1.2.0"
@@ -26160,17 +26110,6 @@
        "node": ">= 0.6"
      }
    },
-    "node_modules/serve-handler/node_modules/minimatch": {
-      "version": "3.0.4",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
-      "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
-      "dependencies": {
-        "brace-expansion": "^1.1.7"
-      },
-      "engines": {
-        "node": "*"
-      }
-    },
    "node_modules/serve-index": {
      "version": "1.9.1",
      "resolved": "https://registry.npmjs.org/serve-index/-/serve-index-1.9.1.tgz",
@@ -30730,9 +30669,9 @@
      }
    },
    "node_modules/webpack/node_modules/loader-utils": {
-      "version": "1.4.0",
-      "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-1.4.0.tgz",
-      "integrity": "sha512-qH0WSMBtn/oHuwjy/NucEgbx5dbxxnxup9s4PVXJUDHZBQY+s0NWA9rJf53RBnQZxfch7euUui7hpoAPvALZdA==",
+      "version": "1.4.2",
+      "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-1.4.2.tgz",
+      "integrity": "sha512-I5d00Pd/jwMD2QCduo657+YM/6L3KZu++pmX9VFncxaxvHcru9jx1lBaFft+r4Mt2jK0Yhp41XlRAihzPxHNCg==",
      "dependencies": {
        "big.js": "^5.2.2",
        "emojis-list": "^3.0.0",
@@ -34883,17 +34822,6 @@
        "lodash-es": "^4.17.15"
      }
    },
-    "@rjsf/validator-ajv8": {
-      "version": "5.0.0-beta.13",
-      "resolved": "https://registry.npmjs.org/@rjsf/validator-ajv8/-/validator-ajv8-5.0.0-beta.13.tgz",
-      "integrity": "sha512-/hrYbiwgCvEqw1Z7YZTWvd+ZAiX5vSN0WAI2hJTJTqKuCTcIH0fqNDCaOg3FBR38BL7seZrUmibIUcPU66iJ1w==",
-      "requires": {
-        "ajv-formats": "^2.1.1",
-        "ajv8": "npm:ajv@^8.11.0",
-        "lodash": "^4.17.15",
-        "lodash-es": "^4.17.15"
-      }
-    },
    "@rollup/plugin-babel": {
      "version": "5.3.1",
      "resolved": "https://registry.npmjs.org/@rollup/plugin-babel/-/plugin-babel-5.3.1.tgz",
@@ -36367,24 +36295,6 @@
      "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==",
      "requires": {}
    },
-    "ajv8": {
-      "version": "npm:ajv@8.11.2",
-      "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.2.tgz",
-      "integrity": "sha512-E4bfmKAhGiSTvMfL1Myyycaub+cUEU2/IvpylXkUu7CHBkBj1f/ikdzbD7YQ6FKUbixDxeYvB/xY4fvyroDlQg==",
-      "requires": {
-        "fast-deep-equal": "^3.1.1",
-        "json-schema-traverse": "^1.0.0",
-        "require-from-string": "^2.0.2",
-        "uri-js": "^4.2.2"
-      },
-      "dependencies": {
-        "json-schema-traverse": {
-          "version": "1.0.0",
-          "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz",
-          "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug=="
-        }
-      }
-    },
    "ansi-align": {
      "version": "3.0.1",
      "resolved": "https://registry.npmjs.org/ansi-align/-/ansi-align-3.0.1.tgz",
@@ -45991,9 +45901,9 @@
      "integrity": "sha512-Jsmr89RcXGIwivFY21FcRrisYZfvLMTWx5kOLc+JTxtpBOG6xML0vzbc6SEQG2FO9/4Fc3wW4LVcB5DmGflaRw=="
    },
    "loader-utils": {
-      "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.2.tgz",
-      "integrity": "sha512-TM57VeHptv569d/GKh6TAYdzKblwDNiumOdkFnejjD0XwTH87K90w3O7AiJRqdQoXygvi1VQTJTLGhJl7WqA7A==",
+      "version": "2.0.4",
+      "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.4.tgz",
+      "integrity": "sha512-xXqpXoINfFhgua9xiqD8fPFHgkoq1mmmpE92WlDbm9rNRd/EbRb+Gqf908T2DMfuHjjJlksiK2RbHVOdD/MqSw==",
      "requires": {
        "big.js": "^5.2.2",
        "emojis-list": "^3.0.0",
@@ -48988,9 +48898,9 @@
      "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ=="
    },
    "loader-utils": {
-      "version": "3.2.0",
-      "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-3.2.0.tgz",
-      "integrity": "sha512-HVl9ZqccQihZ7JM85dco1MvO9G+ONvxoGa9rkhzFsneGLKSUg1gJf9bWzhRhcvm2qChhWpebQhP44qxjKIUCaQ=="
+      "version": "3.2.1",
+      "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-3.2.1.tgz",
+      "integrity": "sha512-ZvFw1KWS3GVyYBYb7qkmRM/WwL2TQQBxgCK62rlvm4WpVQ23Nb4tYjApUlfjrEGvOs7KHEsmyUn75OHZrJMWPw=="
    },
    "supports-color": {
      "version": "7.2.0",
@@ -49765,21 +49675,11 @@
      }
    },
    "recursive-readdir": {
-      "version": "2.2.2",
-      "resolved": "https://registry.npmjs.org/recursive-readdir/-/recursive-readdir-2.2.2.tgz",
-      "integrity": "sha512-nRCcW9Sj7NuZwa2XvH9co8NPeXUBhZP7CRKJtU+cS6PW9FpCIFoI5ib0NT1ZrbNuPoRy0ylyCaUL8Gih4LSyFg==",
+      "version": "2.2.3",
+      "resolved": "https://registry.npmjs.org/recursive-readdir/-/recursive-readdir-2.2.3.tgz",
+      "integrity": "sha512-8HrF5ZsXk5FAH9dgsx3BlUer73nIhuj+9OrQwEbLTPOBzGkL1lsFCR01am+v+0m2Cmbs1nP12hLDl5FA7EszKA==",
      "requires": {
-        "minimatch": "3.0.4"
-      },
-      "dependencies": {
-        "minimatch": {
-          "version": "3.0.4",
-          "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
-          "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
-          "requires": {
-            "brace-expansion": "^1.1.7"
-          }
-        }
+        "minimatch": "^3.0.5"
      }
    },
    "redent": {
@@ -50593,9 +50493,9 @@
      }
    },
    "serve": {
-      "version": "14.0.1",
-      "resolved": "https://registry.npmjs.org/serve/-/serve-14.0.1.tgz",
-      "integrity": "sha512-tNGwxl27FwA8TbmMQqN0jTaSx8/trL532qZsJHX1VdiEIjjtMJHCs7AFS6OvtC7cTHOvmjXqt5yczejU6CV2Xg==",
+      "version": "14.1.1",
+      "resolved": "https://registry.npmjs.org/serve/-/serve-14.1.1.tgz",
+      "integrity": "sha512-7RhRDEirZ7Qyee4QWhBHO9qRtjIGsIPGecDDPzNzlOsjDiZWcq36GS8FioVJAuJPVJBBDTsGp33WWOO4B9A82g==",
      "requires": {
        "@zeit/schemas": "2.21.0",
        "ajv": "8.11.0",
@@ -50606,7 +50506,7 @@
        "clipboardy": "3.0.0",
        "compression": "1.7.4",
        "is-port-reachable": "4.0.0",
-        "serve-handler": "6.1.3",
+        "serve-handler": "6.1.5",
        "update-check": "1.5.4"
      },
      "dependencies": {
@@ -50634,15 +50534,15 @@
      }
    },
    "serve-handler": {
-      "version": "6.1.3",
-      "resolved": "https://registry.npmjs.org/serve-handler/-/serve-handler-6.1.3.tgz",
-      "integrity": "sha512-FosMqFBNrLyeiIDvP1zgO6YoTzFYHxLDEIavhlmQ+knB2Z7l1t+kGLHkZIDN7UVWqQAmKI3D20A6F6jo3nDd4w==",
+      "version": "6.1.5",
+      "resolved": "https://registry.npmjs.org/serve-handler/-/serve-handler-6.1.5.tgz",
+      "integrity": "sha512-ijPFle6Hwe8zfmBxJdE+5fta53fdIY0lHISJvuikXB3VYFafRjMRpOffSPvCYsbKyBA7pvy9oYr/BT1O3EArlg==",
      "requires": {
        "bytes": "3.0.0",
        "content-disposition": "0.5.2",
        "fast-url-parser": "1.1.3",
        "mime-types": "2.1.18",
-        "minimatch": "3.0.4",
+        "minimatch": "3.1.2",
        "path-is-inside": "1.0.2",
        "path-to-regexp": "2.2.1",
        "range-parser": "1.2.0"
@@ -50660,14 +50560,6 @@
      "requires": {
        "mime-db": "~1.33.0"
      }
    },
-    "minimatch": {
-      "version": "3.0.4",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
-      "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
-      "requires": {
-        "brace-expansion": "^1.1.7"
-      }
-    }
    }
  },
@@ -54022,9 +53914,9 @@
      }
    },
    "loader-utils": {
-      "version": "1.4.0",
-      "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-1.4.0.tgz",
-      "integrity": "sha512-qH0WSMBtn/oHuwjy/NucEgbx5dbxxnxup9s4PVXJUDHZBQY+s0NWA9rJf53RBnQZxfch7euUui7hpoAPvALZdA==",
+      "version": "1.4.2",
+      "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-1.4.2.tgz",
+      "integrity": "sha512-I5d00Pd/jwMD2QCduo657+YM/6L3KZu++pmX9VFncxaxvHcru9jx1lBaFft+r4Mt2jK0Yhp41XlRAihzPxHNCg==",
      "requires": {
        "big.js": "^5.2.2",
        "emojis-list": "^3.0.0",

@@ -46,7 +46,7 @@ export default function ButtonWithConfirmation({
      <Modal
        open={showConfirmationPrompt}
        danger
-        data-qa="modal-confirmation-dialog"
+        data-qa={`${dataQa}-modal-confirmation-dialog`}
        modalHeading={description}
        modalLabel={title}
        primaryButtonText={confirmButtonLabel}

@ -17,9 +17,13 @@ import {
|
|||
import { Logout, Login } from '@carbon/icons-react';
|
||||
import { useEffect, useState } from 'react';
|
||||
import { useLocation } from 'react-router-dom';
|
||||
import { Can } from '@casl/react';
|
||||
// @ts-expect-error TS(2307) FIXME: Cannot find module '../logo.svg' or its correspond... Remove this comment to see the full error message
|
||||
import logo from '../logo.svg';
|
||||
import UserService from '../services/UserService';
|
||||
import { useUriListForPermissions } from '../hooks/UriListForPermissions';
|
||||
import { PermissionsToCheck } from '../interfaces';
|
||||
import { usePermissionFetcher } from '../hooks/PermissionService';
|
||||
|
||||
// for ref: https://react-bootstrap.github.io/components/navbar/
|
||||
export default function NavigationBar() {
|
||||
|
@ -34,6 +38,14 @@ export default function NavigationBar() {
|
|||
const location = useLocation();
|
||||
const [activeKey, setActiveKey] = useState<string>('');
|
||||
|
||||
const { targetUris } = useUriListForPermissions();
|
||||
const permissionRequestData: PermissionsToCheck = {
|
||||
[targetUris.authenticationListPath]: ['GET'],
|
||||
[targetUris.messageInstanceListPath]: ['GET'],
|
||||
[targetUris.secretListPath]: ['GET'],
|
||||
};
|
||||
const { ability } = usePermissionFetcher(permissionRequestData);
|
||||
|
||||
useEffect(() => {
|
||||
let newActiveKey = '/admin/process-groups';
|
||||
if (location.pathname.match(/^\/admin\/messages\b/)) {
|
||||
|
@ -62,7 +74,9 @@ export default function NavigationBar() {
|
|||
if (UserService.isLoggedIn()) {
|
||||
return (
|
||||
<>
|
||||
<HeaderGlobalAction>{UserService.getUsername()}</HeaderGlobalAction>
|
||||
<HeaderGlobalAction className="username-header-text">
|
||||
{UserService.getUsername()}
|
||||
</HeaderGlobalAction>
|
||||
<HeaderGlobalAction
|
||||
aria-label="Logout"
|
||||
onClick={handleLogout}
|
||||
|
@ -84,6 +98,42 @@ export default function NavigationBar() {
|
|||
);
|
||||
};
|
||||
|
||||
const configurationElement = () => {
|
||||
return (
|
||||
<Can
|
||||
I="GET"
|
||||
a={targetUris.authenticationListPath}
|
||||
ability={ability}
|
||||
passThrough
|
||||
>
|
||||
{(authenticationAllowed: boolean) => {
|
||||
return (
|
||||
<Can
|
||||
I="GET"
|
||||
a={targetUris.secretListPath}
|
||||
ability={ability}
|
||||
passThrough
|
||||
>
|
||||
{(secretAllowed: boolean) => {
|
||||
if (secretAllowed || authenticationAllowed) {
|
||||
return (
|
||||
<HeaderMenuItem
|
||||
href="/admin/configuration"
|
||||
isCurrentPage={isActivePage('/admin/configuration')}
|
||||
>
|
||||
Configuration
|
||||
</HeaderMenuItem>
|
||||
);
|
||||
}
|
||||
return null;
|
||||
}}
|
||||
</Can>
|
||||
);
|
||||
}}
|
||||
</Can>
|
||||
);
|
||||
};
|
||||
|
||||
const headerMenuItems = () => {
|
||||
return (
|
||||
<>
|
||||
|
@ -103,35 +153,33 @@ export default function NavigationBar() {
|
|||
>
|
||||
Process Instances
|
||||
</HeaderMenuItem>
|
||||
<HeaderMenuItem
|
||||
href="/admin/messages"
|
||||
isCurrentPage={isActivePage('/admin/messages')}
|
||||
>
|
||||
Messages
|
||||
</HeaderMenuItem>
|
||||
<HeaderMenuItem
|
||||
href="/admin/configuration"
|
||||
isCurrentPage={isActivePage('/admin/configuration')}
|
||||
>
|
||||
Configuration
|
||||
</HeaderMenuItem>
|
||||
<Can I="GET" a={targetUris.messageInstanceListPath} ability={ability}>
|
||||
<HeaderMenuItem
|
||||
href="/admin/messages"
|
||||
isCurrentPage={isActivePage('/admin/messages')}
|
||||
>
|
||||
Messages
|
||||
</HeaderMenuItem>
|
||||
</Can>
|
||||
{configurationElement()}
|
||||
<HeaderMenuItem
|
||||
href="/admin/process-instances/reports"
|
||||
isCurrentPage={isActivePage('/admin/process-instances/reports')}
|
||||
>
|
||||
Reports
|
||||
Perspectives
|
||||
</HeaderMenuItem>
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
if (activeKey) {
|
||||
if (activeKey && ability) {
|
||||
return (
|
||||
<HeaderContainer
|
||||
render={({ isSideNavExpanded, onClickSideNavExpand }: any) => (
|
||||
<Header aria-label="IBM Platform Name" className="cds--g100">
|
||||
<SkipToContent />
|
||||
<HeaderMenuButton
|
||||
data-qa="header-menu-expand-button"
|
||||
aria-label="Open menu"
|
||||
onClick={onClickSideNavExpand}
|
||||
isActive={isSideNavExpanded}
|
||||
|
|
|
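The nested Can blocks above rely on CASL's passThrough mode: with passThrough set, the child function always renders and receives the boolean result of the ability check, which is what lets one menu item appear when either of two permissions is granted. A minimal sketch of the pattern (the URI and the rendered element are illustrative):

<Can I="GET" a={targetUris.secretListPath} ability={ability} passThrough>
  {(allowed: boolean) => (allowed ? <span>shown when permitted</span> : null)}
</Can>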
@ -2,7 +2,7 @@ import { useState } from 'react';
import { useNavigate } from 'react-router-dom';
// @ts-ignore
import { Button, ButtonSet, Form, Stack, TextInput } from '@carbon/react';
import { modifyProcessModelPath, slugifyString } from '../helpers';
import { modifyProcessIdentifierForPathParam, slugifyString } from '../helpers';
import HttpService from '../services/HttpService';
import { ProcessGroup } from '../interfaces';
import ButtonWithConfirmation from './ButtonWithConfirmation';
@ -28,7 +28,9 @@ export default function ProcessGroupForm({
  const navigateToProcessGroup = (_result: any) => {
    if (newProcessGroupId) {
      navigate(
        `/admin/process-groups/${modifyProcessModelPath(newProcessGroupId)}`
        `/admin/process-groups/${modifyProcessIdentifierForPathParam(
          newProcessGroupId
        )}`
      );
    }
  };
@ -43,7 +45,9 @@ export default function ProcessGroupForm({

  const deleteProcessGroup = () => {
    HttpService.makeCallToBackend({
      path: `/process-groups/${modifyProcessModelPath(processGroup.id)}`,
      path: `/process-groups/${modifyProcessIdentifierForPathParam(
        processGroup.id
      )}`,
      successCallback: navigateToProcessGroups,
      httpMethod: 'DELETE',
    });
@ -55,7 +59,7 @@ export default function ProcessGroupForm({

    event.preventDefault();
    let hasErrors = false;
    if (!hasValidIdentifier(processGroup.id)) {
    if (mode === 'new' && !hasValidIdentifier(processGroup.id)) {
      setIdentifierInvalid(true);
      hasErrors = true;
    }
@ -68,7 +72,9 @@ export default function ProcessGroupForm({
    }
    let path = '/process-groups';
    if (mode === 'edit') {
      path = `/process-groups/${processGroup.id}`;
      path = `/process-groups/${modifyProcessIdentifierForPathParam(
        processGroup.id
      )}`;
    }
    let httpMethod = 'POST';
    if (mode === 'edit') {
@ -169,6 +175,7 @@ export default function ProcessGroupForm({
    if (mode === 'edit') {
      buttons.push(
        <ButtonWithConfirmation
          data-qa="delete-process-group-button"
          description={`Delete Process Group ${processGroup.id}?`}
          onConfirmation={deleteProcessGroup}
          buttonLabel="Delete"

@ -0,0 +1,100 @@
import { ReactElement, useEffect, useState } from 'react';
import { useSearchParams } from 'react-router-dom';
import {
  ArrowRight,
  // @ts-ignore
} from '@carbon/icons-react';
import {
  ClickableTile,
  // @ts-ignore
} from '@carbon/react';
import HttpService from '../services/HttpService';
import { ProcessGroup } from '../interfaces';
import {
  modifyProcessIdentifierForPathParam,
  truncateString,
} from '../helpers';

type OwnProps = {
  processGroup?: ProcessGroup;
  headerElement?: ReactElement;
};

export default function ProcessGroupListTiles({
  processGroup,
  headerElement,
}: OwnProps) {
  const [searchParams] = useSearchParams();

  const [processGroups, setProcessGroups] = useState<ProcessGroup[] | null>(
    null
  );

  useEffect(() => {
    const setProcessGroupsFromResult = (result: any) => {
      setProcessGroups(result.results);
    };
    let queryParams = '?per_page=1000';
    if (processGroup) {
      queryParams = `${queryParams}&process_group_identifier=${processGroup.id}`;
    }
    HttpService.makeCallToBackend({
      path: `/process-groups${queryParams}`,
      successCallback: setProcessGroupsFromResult,
    });
  }, [searchParams, processGroup]);

  const processGroupDirectChildrenCount = (pg: ProcessGroup) => {
    return (pg.process_models || []).length + (pg.process_groups || []).length;
  };

  const processGroupsDisplayArea = () => {
    let displayText = null;
    if (processGroups && processGroups.length > 0) {
      displayText = (processGroups || []).map((row: ProcessGroup) => {
        return (
          <ClickableTile
            id={`process-group-tile-${row.id}`}
            className="tile-process-group"
            href={`/admin/process-groups/${modifyProcessIdentifierForPathParam(
              row.id
            )}`}
          >
            <div className="tile-process-group-content-container">
              <ArrowRight />
              <div className="tile-process-group-display-name">
                {row.display_name}
              </div>
              <p className="tile-description">
                {truncateString(row.description || '', 100)}
              </p>
              <p className="tile-process-group-children-count tile-pin-bottom">
                Total Sub Items: {processGroupDirectChildrenCount(row)}
              </p>
            </div>
          </ClickableTile>
        );
      });
    } else {
      displayText = <p>No Groups To Display</p>;
    }
    return displayText;
  };

  const processGroupArea = () => {
    if (processGroups && (!processGroup || processGroups.length > 0)) {
      return (
        <>
          {headerElement}
          {processGroupsDisplayArea()}
        </>
      );
    }
    return null;
  };

  if (processGroups) {
    return <>{processGroupArea()}</>;
  }
  return null;
}

@ -20,15 +20,19 @@ import {
  TableHeader,
  TableHead,
  TableRow,
  TimePicker,
  // @ts-ignore
} from '@carbon/react';
import { PROCESS_STATUSES, DATE_FORMAT, DATE_FORMAT_CARBON } from '../config';
import {
  convertDateStringToSeconds,
  convertSecondsToFormattedDate,
  convertDateAndTimeStringsToSeconds,
  convertDateObjectToFormattedHoursMinutes,
  convertSecondsToFormattedDateString,
  convertSecondsToFormattedDateTime,
  convertSecondsToFormattedTimeHoursMinutes,
  getPageInfoFromSearchParams,
  getProcessModelFullIdentifierFromSearchParams,
  modifyProcessModelPath,
  modifyProcessIdentifierForPathParam,
} from '../helpers';

import PaginationForTable from './PaginationForTable';
@ -39,8 +43,13 @@ import HttpService from '../services/HttpService';

import 'react-bootstrap-typeahead/css/Typeahead.css';
import 'react-bootstrap-typeahead/css/Typeahead.bs5.css';
import { PaginationObject, ProcessModel } from '../interfaces';
import {
  PaginationObject,
  ProcessModel,
  ProcessInstanceReport,
} from '../interfaces';
import ProcessModelSearch from './ProcessModelSearch';
import ProcessInstanceReportSearch from './ProcessInstanceReportSearch';

type OwnProps = {
  filtersEnabled?: boolean;
@ -49,6 +58,10 @@ type OwnProps = {
  perPageOptions?: number[];
};

interface dateParameters {
  [key: string]: ((..._args: any[]) => any)[];
}
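The dateParameters shape maps each date query parameter to a pair of setters: index 0 updates the date string and index 1 the time string. A minimal sketch of how an entry is consumed (the values are illustrative):

const [setDateString, setTimeString] = dateParametersToAlwaysFilterBy.start_from;
setDateString('2022-10-21');
setTimeString('10:30');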

export default function ProcessInstanceListTable({
  filtersEnabled = true,
  processModelFullIdentifier,
@ -66,11 +79,20 @@ export default function ProcessInstanceListTable({

  const oneHourInSeconds = 3600;
  const oneMonthInSeconds = oneHourInSeconds * 24 * 30;
  const [startFrom, setStartFrom] = useState<string>('');
  const [startTo, setStartTo] = useState<string>('');
  const [endFrom, setEndFrom] = useState<string>('');
  const [endTo, setEndTo] = useState<string>('');
  const [startFromDate, setStartFromDate] = useState<string>('');
  const [startToDate, setStartToDate] = useState<string>('');
  const [endFromDate, setEndFromDate] = useState<string>('');
  const [endToDate, setEndToDate] = useState<string>('');
  const [startFromTime, setStartFromTime] = useState<string>('');
  const [startToTime, setStartToTime] = useState<string>('');
  const [endFromTime, setEndFromTime] = useState<string>('');
  const [endToTime, setEndToTime] = useState<string>('');
  const [showFilterOptions, setShowFilterOptions] = useState<boolean>(false);
  const [startFromTimeInvalid, setStartFromTimeInvalid] =
    useState<boolean>(false);
  const [startToTimeInvalid, setStartToTimeInvalid] = useState<boolean>(false);
  const [endFromTimeInvalid, setEndFromTimeInvalid] = useState<boolean>(false);
  const [endToTimeInvalid, setEndToTimeInvalid] = useState<boolean>(false);

  const setErrorMessage = (useContext as any)(ErrorContext)[1];

@ -85,15 +107,26 @@ export default function ProcessInstanceListTable({
  >([]);
  const [processModelSelection, setProcessModelSelection] =
    useState<ProcessModel | null>(null);
  const [processInstanceReportSelection, setProcessInstanceReportSelection] =
    useState<ProcessInstanceReport | null>(null);

  const parametersToAlwaysFilterBy = useMemo(() => {
  const dateParametersToAlwaysFilterBy: dateParameters = useMemo(() => {
    return {
      start_from: setStartFrom,
      start_to: setStartTo,
      end_from: setEndFrom,
      end_to: setEndTo,
      start_from: [setStartFromDate, setStartFromTime],
      start_to: [setStartToDate, setStartToTime],
      end_from: [setEndFromDate, setEndFromTime],
      end_to: [setEndToDate, setEndToTime],
    };
  }, [setStartFrom, setStartTo, setEndFrom, setEndTo]);
  }, [
    setStartFromDate,
    setStartFromTime,
    setStartToDate,
    setStartToTime,
    setEndFromDate,
    setEndFromTime,
    setEndToDate,
    setEndToTime,
  ]);

  const parametersToGetFromSearchParams = useMemo(() => {
    return {
@ -110,6 +143,14 @@ export default function ProcessInstanceListTable({
    setReportMetadata(result.report_metadata);
    setPagination(result.pagination);
    setProcessInstanceFilters(result.filters);

    // TODO: need to iron out this interaction some more
    if (result.report_identifier !== 'default') {
      setProcessInstanceReportSelection({
        id: result.report_identifier,
        display_name: result.report_identifier,
      });
    }
  }
  function getProcessInstances() {
    // eslint-disable-next-line prefer-const
@ -130,19 +171,32 @@ export default function ProcessInstanceListTable({
      queryParamString += `&user_filter=${userAppliedFilter}`;
    }

    Object.keys(parametersToAlwaysFilterBy).forEach((paramName: string) => {
      // @ts-expect-error TS(7053) FIXME:
      const functionToCall = parametersToAlwaysFilterBy[paramName];
      const searchParamValue = searchParams.get(paramName);
      if (searchParamValue) {
        queryParamString += `&${paramName}=${searchParamValue}`;
        const dateString = convertSecondsToFormattedDate(
          searchParamValue as any
        );
        functionToCall(dateString);
        setShowFilterOptions(true);
    const reportIdentifier = searchParams.get('report_identifier');
    if (reportIdentifier) {
      queryParamString += `&report_identifier=${reportIdentifier}`;
    }

    Object.keys(dateParametersToAlwaysFilterBy).forEach(
      (paramName: string) => {
        const dateFunctionToCall =
          dateParametersToAlwaysFilterBy[paramName][0];
        const timeFunctionToCall =
          dateParametersToAlwaysFilterBy[paramName][1];
        const searchParamValue = searchParams.get(paramName);
        if (searchParamValue) {
          queryParamString += `&${paramName}=${searchParamValue}`;
          const dateString = convertSecondsToFormattedDateString(
            searchParamValue as any
          );
          dateFunctionToCall(dateString);
          const timeString = convertSecondsToFormattedTimeHoursMinutes(
            searchParamValue as any
          );
          timeFunctionToCall(timeString);
          setShowFilterOptions(true);
        }
      }
    });
    );

    Object.keys(parametersToGetFromSearchParams).forEach(
      (paramName: string) => {
@ -200,7 +254,7 @@ export default function ProcessInstanceListTable({
    if (filtersEnabled) {
      // populate process model selection
      HttpService.makeCallToBackend({
        path: `/process-models?per_page=1000`,
        path: `/process-models?per_page=1000&recursive=true`,
        successCallback: processResultForProcessModels,
      });
    } else {
@ -211,7 +265,7 @@ export default function ProcessInstanceListTable({
    params,
    oneMonthInSeconds,
    oneHourInSeconds,
    parametersToAlwaysFilterBy,
    dateParametersToAlwaysFilterBy,
    parametersToGetFromSearchParams,
    filtersEnabled,
    paginationQueryParamPrefix,
@ -219,16 +273,25 @@ export default function ProcessInstanceListTable({
    perPageOptions,
  ]);

  // This sets the filter data using the saved reports returned from the initial instance_list query.
  // This could probably be merged into the main useEffect but it works here now.
  useEffect(() => {
    const filters = processInstanceFilters as any;
    Object.keys(parametersToAlwaysFilterBy).forEach((paramName: string) => {
      // @ts-expect-error TS(7053) FIXME:
      const functionToCall = parametersToAlwaysFilterBy[paramName];
    Object.keys(dateParametersToAlwaysFilterBy).forEach((paramName: string) => {
      const dateFunctionToCall = dateParametersToAlwaysFilterBy[paramName][0];
      const timeFunctionToCall = dateParametersToAlwaysFilterBy[paramName][1];
      const paramValue = filters[paramName];
      functionToCall('');
      dateFunctionToCall('');
      timeFunctionToCall('');
      if (paramValue) {
        const dateString = convertSecondsToFormattedDate(paramValue as any);
        functionToCall(dateString);
        const dateString = convertSecondsToFormattedDateString(
          paramValue as any
        );
        dateFunctionToCall(dateString);
        const timeString = convertSecondsToFormattedTimeHoursMinutes(
          paramValue as any
        );
        timeFunctionToCall(timeString);
        setShowFilterOptions(true);
      }
    });
@ -253,7 +316,7 @@ export default function ProcessInstanceListTable({
    setProcessStatusSelection(processStatusSelectedArray);
  }, [
    processInstanceFilters,
    parametersToAlwaysFilterBy,
    dateParametersToAlwaysFilterBy,
    parametersToGetFromSearchParams,
    processModelAvailableItems,
  ]);
@ -285,10 +348,22 @@ export default function ProcessInstanceListTable({
    );
    let queryParamString = `per_page=${perPage}&page=${page}&user_filter=true`;

    const startFromSeconds = convertDateStringToSeconds(startFrom);
    const endFromSeconds = convertDateStringToSeconds(endFrom);
    const startToSeconds = convertDateStringToSeconds(startTo);
    const endToSeconds = convertDateStringToSeconds(endTo);
    const startFromSeconds = convertDateAndTimeStringsToSeconds(
      startFromDate,
      startFromTime || '00:00:00'
    );
    const startToSeconds = convertDateAndTimeStringsToSeconds(
      startToDate,
      startToTime || '00:00:00'
    );
    const endFromSeconds = convertDateAndTimeStringsToSeconds(
      endFromDate,
      endFromTime || '00:00:00'
    );
    const endToSeconds = convertDateAndTimeStringsToSeconds(
      endToDate,
      endToTime || '00:00:00'
    );
    if (isTrueComparison(startFromSeconds, '>', startToSeconds)) {
      setErrorMessage({
        message: '"Start date from" cannot be after "start date to"',
@ -334,6 +409,10 @@ export default function ProcessInstanceListTable({
      queryParamString += `&process_model_identifier=${processModelSelection.id}`;
    }

    if (processInstanceReportSelection) {
      queryParamString += `&report_identifier=${processInstanceReportSelection.id}`;
    }

    setErrorMessage(null);
    navigate(`/admin/process-instances?${queryParamString}`);
  };
@ -342,24 +421,50 @@ export default function ProcessInstanceListTable({
    labelString: any,
    name: any,
    initialDate: any,
    onChangeFunction: any
    initialTime: string,
    onChangeDateFunction: any,
    onChangeTimeFunction: any,
    timeInvalid: boolean,
    setTimeInvalid: any
  ) => {
    return (
      <DatePicker dateFormat={DATE_FORMAT_CARBON} datePickerType="single">
        <DatePickerInput
          id={`date-picker-${name}`}
          placeholder={DATE_FORMAT}
          labelText={labelString}
          type="text"
          size="md"
          autocomplete="off"
          allowInput={false}
          onChange={(dateChangeEvent: any) => {
            onChangeFunction(dateChangeEvent.srcElement.value);
      <>
        <DatePicker dateFormat={DATE_FORMAT_CARBON} datePickerType="single">
          <DatePickerInput
            id={`date-picker-${name}`}
            placeholder={DATE_FORMAT}
            labelText={labelString}
            type="text"
            size="md"
            autocomplete="off"
            allowInput={false}
            onChange={(dateChangeEvent: any) => {
              if (!initialDate && !initialTime) {
                onChangeTimeFunction(
                  convertDateObjectToFormattedHoursMinutes(new Date())
                );
              }
              onChangeDateFunction(dateChangeEvent.srcElement.value);
            }}
            value={initialDate}
          />
        </DatePicker>
        <TimePicker
          invalid={timeInvalid}
          id="time-picker"
          labelText="Select a time"
          pattern="^([01]\d|2[0-3]):?([0-5]\d)$"
          value={initialTime}
          onChange={(event: any) => {
            if (event.srcElement.validity.valid) {
              setTimeInvalid(false);
            } else {
              setTimeInvalid(true);
            }
            onChangeTimeFunction(event.srcElement.value);
          }}
          value={initialDate}
        />
      </DatePicker>
      </>
    );
  };

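The TimePicker pattern above accepts 24-hour times with an optional colon. A quick check of that regex (values are illustrative):

const TIME_PATTERN = /^([01]\d|2[0-3]):?([0-5]\d)$/;
TIME_PATTERN.test('09:30'); // true
TIME_PATTERN.test('23:59'); // true
TIME_PATTERN.test('0930'); // true, since the colon is optional
TIME_PATTERN.test('24:00'); // false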
@ -386,10 +491,14 @@ export default function ProcessInstanceListTable({
  const clearFilters = () => {
    setProcessModelSelection(null);
    setProcessStatusSelection([]);
    setStartFrom('');
    setStartTo('');
    setEndFrom('');
    setEndTo('');
    setStartFromDate('');
    setStartFromTime('');
    setStartToDate('');
    setStartToTime('');
    setEndFromDate('');
    setEndFromTime('');
    setEndToDate('');
    setEndToTime('');
  };

  const filterOptions = () => {
@ -415,18 +524,49 @@ export default function ProcessInstanceListTable({
            {dateComponent(
              'Start date from',
              'start-from',
              startFrom,
              setStartFrom
              startFromDate,
              startFromTime,
              setStartFromDate,
              setStartFromTime,
              startFromTimeInvalid,
              setStartFromTimeInvalid
            )}
          </Column>
          <Column md={4}>
            {dateComponent('Start date to', 'start-to', startTo, setStartTo)}
            {dateComponent(
              'Start date to',
              'start-to',
              startToDate,
              startToTime,
              setStartToDate,
              setStartToTime,
              startToTimeInvalid,
              setStartToTimeInvalid
            )}
          </Column>
          <Column md={4}>
            {dateComponent('End date from', 'end-from', endFrom, setEndFrom)}
            {dateComponent(
              'End date from',
              'end-from',
              endFromDate,
              endFromTime,
              setEndFromDate,
              setEndFromTime,
              endFromTimeInvalid,
              setEndFromTimeInvalid
            )}
          </Column>
          <Column md={4}>
            {dateComponent('End date to', 'end-to', endTo, setEndTo)}
            {dateComponent(
              'End date to',
              'end-to',
              endToDate,
              endToTime,
              setEndToDate,
              setEndToTime,
              endToTimeInvalid,
              setEndToTimeInvalid
            )}
          </Column>
        </Grid>
        <Grid fullWidth className="with-bottom-margin">
@ -471,9 +611,8 @@ export default function ProcessInstanceListTable({
  });

  const formatProcessInstanceId = (row: any, id: any) => {
    const modifiedProcessModelId: String = modifyProcessModelPath(
      row.process_model_identifier
    );
    const modifiedProcessModelId: String =
      modifyProcessIdentifierForPathParam(row.process_model_identifier);
    return (
      <Link
        data-qa="process-instance-show-link"
@ -486,14 +625,16 @@ export default function ProcessInstanceListTable({
  const formatProcessModelIdentifier = (_row: any, identifier: any) => {
    return (
      <Link
        to={`/admin/process-models/${modifyProcessModelPath(identifier)}`}
        to={`/admin/process-models/${modifyProcessIdentifierForPathParam(
          identifier
        )}`}
      >
        {identifier}
      </Link>
    );
  };
  const formatSecondsForDisplay = (_row: any, seconds: any) => {
    return convertSecondsToFormattedDate(seconds) || '-';
    return convertSecondsToFormattedDateTime(seconds) || '-';
  };
  const defaultFormatter = (_row: any, value: any) => {
    return value;
@ -548,6 +689,29 @@ export default function ProcessInstanceListTable({
    setShowFilterOptions(!showFilterOptions);
  };

  const processInstanceReportDidChange = (selection: any) => {
    clearFilters();

    const selectedReport = selection.selectedItem;
    setProcessInstanceReportSelection(selectedReport);

    const queryParamString = selectedReport
      ? `&report_identifier=${selectedReport.id}`
      : '';

    setErrorMessage(null);
    navigate(`/admin/process-instances?${queryParamString}`);
  };

  const reportSearchComponent = () => {
    return (
      <ProcessInstanceReportSearch
        onChange={processInstanceReportDidChange}
        selectedItem={processInstanceReportSelection}
      />
    );
  };

  const filterComponent = () => {
    if (!filtersEnabled) {
      return null;
@ -591,6 +755,7 @@ export default function ProcessInstanceListTable({
  return (
    <>
      {filterComponent()}
      {reportSearchComponent()}
      <PaginationForTable
        page={page}
        perPage={perPage}

@ -0,0 +1,73 @@
import { useState } from 'react';
import {
  ComboBox,
  // @ts-ignore
} from '@carbon/react';
import { truncateString } from '../helpers';
import { ProcessInstanceReport } from '../interfaces';
import HttpService from '../services/HttpService';

type OwnProps = {
  onChange: (..._args: any[]) => any;
  selectedItem?: ProcessInstanceReport | null;
  titleText?: string;
};

export default function ProcessInstanceReportSearch({
  selectedItem,
  onChange,
  titleText = 'Process instance perspectives',
}: OwnProps) {
  const [processInstanceReports, setProcessInstanceReports] = useState<
    ProcessInstanceReport[] | null
  >(null);

  function setProcessInstanceReportsFromResult(result: any) {
    const processInstanceReportsFromApi = result.map((item: any) => {
      return { id: item.identifier, display_name: item.identifier };
    });
    setProcessInstanceReports(processInstanceReportsFromApi);
  }

  if (processInstanceReports === null) {
    setProcessInstanceReports([]);
    HttpService.makeCallToBackend({
      path: `/process-instances/reports`,
      successCallback: setProcessInstanceReportsFromResult,
    });
  }

  const shouldFilterProcessInstanceReport = (options: any) => {
    const processInstanceReport: ProcessInstanceReport = options.item;
    const { inputValue } = options;
    return `${processInstanceReport.id} (${processInstanceReport.display_name})`.includes(
      inputValue
    );
  };

  const reportsAvailable = () => {
    return processInstanceReports && processInstanceReports.length > 0;
  };

  return reportsAvailable() ? (
    <ComboBox
      onChange={onChange}
      id="process-instance-report-select"
      data-qa="process-instance-report-selection"
      items={processInstanceReports}
      itemToString={(processInstanceReport: ProcessInstanceReport) => {
        if (processInstanceReport) {
          return `${processInstanceReport.id} (${truncateString(
            processInstanceReport.display_name,
            20
          )})`;
        }
        return null;
      }}
      shouldFilterItem={shouldFilterProcessInstanceReport}
      placeholder="Choose a process instance perspective"
      titleText={titleText}
      selectedItem={selectedItem}
    />
  ) : null;
}

@ -0,0 +1,66 @@
import { useContext } from 'react';
import { useNavigate } from 'react-router-dom';
import {
  Button,
  // @ts-ignore
} from '@carbon/react';
import { ProcessModel } from '../interfaces';
import HttpService from '../services/HttpService';
import ErrorContext from '../contexts/ErrorContext';
import { modifyProcessIdentifierForPathParam } from '../helpers';

type OwnProps = {
  processModel: ProcessModel;
  onSuccessCallback: Function;
  className?: string;
};

export default function ProcessInstanceRun({
  processModel,
  onSuccessCallback,
  className,
}: OwnProps) {
  const navigate = useNavigate();
  const setErrorMessage = (useContext as any)(ErrorContext)[1];
  const modifiedProcessModelId = modifyProcessIdentifierForPathParam(
    processModel.id
  );

  const onProcessInstanceRun = (processInstance: any) => {
    // FIXME: ensure that the task is actually for the current user as well
    const processInstanceId = (processInstance as any).id;
    const nextTask = (processInstance as any).next_task;
    if (nextTask && nextTask.state === 'READY') {
      navigate(`/tasks/${processInstanceId}/${nextTask.id}`);
    }
    onSuccessCallback(processInstance);
  };

  const processModelRun = (processInstance: any) => {
    setErrorMessage(null);
    HttpService.makeCallToBackend({
      path: `/process-instances/${processInstance.id}/run`,
      successCallback: onProcessInstanceRun,
      failureCallback: setErrorMessage,
      httpMethod: 'POST',
    });
  };

  const processInstanceCreateAndRun = () => {
    HttpService.makeCallToBackend({
      path: `/process-models/${modifiedProcessModelId}/process-instances`,
      successCallback: processModelRun,
      httpMethod: 'POST',
    });
  };

  return (
    <Button
      onClick={processInstanceCreateAndRun}
      variant="primary"
      className={className}
    >
      Run
    </Button>
  );
}

@ -2,14 +2,9 @@ import { useState } from 'react';
import { useNavigate } from 'react-router-dom';
// @ts-ignore
import { Button, ButtonSet, Form, Stack, TextInput } from '@carbon/react';
import {
  getGroupFromModifiedModelId,
  modifyProcessModelPath,
  slugifyString,
} from '../helpers';
import { modifyProcessIdentifierForPathParam, slugifyString } from '../helpers';
import HttpService from '../services/HttpService';
import { ProcessModel } from '../interfaces';
import ButtonWithConfirmation from './ButtonWithConfirmation';

type OwnProps = {
  mode: string;
@ -29,41 +24,23 @@ export default function ProcessModelForm({
    useState<boolean>(false);
  const [displayNameInvalid, setDisplayNameInvalid] = useState<boolean>(false);
  const navigate = useNavigate();
  const modifiedProcessModelPath = modifyProcessModelPath(processModel.id);

  const navigateToProcessModel = (result: ProcessModel) => {
    if ('id' in result) {
      const modifiedProcessModelPathFromResult = modifyProcessModelPath(
        result.id
      );
      const modifiedProcessModelPathFromResult =
        modifyProcessIdentifierForPathParam(result.id);
      navigate(`/admin/process-models/${modifiedProcessModelPathFromResult}`);
    }
  };

  const navigateToProcessModels = (_result: any) => {
    navigate(
      `/admin/process-groups/${getGroupFromModifiedModelId(
        modifiedProcessModelPath
      )}`
    );
  };

  const hasValidIdentifier = (identifierToCheck: string) => {
    return identifierToCheck.match(/^[a-z0-9][0-9a-z-]+[a-z0-9]$/);
  };

  const deleteProcessModel = () => {
    HttpService.makeCallToBackend({
      path: `/process-models/${modifiedProcessModelPath}`,
      successCallback: navigateToProcessModels,
      httpMethod: 'DELETE',
    });
  };

  const handleFormSubmission = (event: any) => {
    event.preventDefault();
    let hasErrors = false;
    if (!hasValidIdentifier(processModel.id)) {
    if (mode === 'new' && !hasValidIdentifier(processModel.id)) {
      setIdentifierInvalid(true);
      hasErrors = true;
    }
@ -74,7 +51,9 @@ export default function ProcessModelForm({
    if (hasErrors) {
      return;
    }
    const path = `/process-models/${modifiedProcessModelPath}`;
    const path = `/process-models/${modifyProcessIdentifierForPathParam(
      processGroupId || ''
    )}`;
    let httpMethod = 'POST';
    if (mode === 'edit') {
      httpMethod = 'PUT';
@ -85,7 +64,7 @@ export default function ProcessModelForm({
    };
    if (mode === 'new') {
      Object.assign(postBody, {
        id: `${processGroupId}:${processModel.id}`,
        id: `${processGroupId}/${processModel.id}`,
      });
    }

@ -172,16 +151,6 @@ export default function ProcessModelForm({
        Submit
      </Button>,
    ];
    if (mode === 'edit') {
      buttons.push(
        <ButtonWithConfirmation
          description={`Delete Process Model ${processModel.id}?`}
          onConfirmation={deleteProcessModel}
          buttonLabel="Delete"
          confirmButtonLabel="Delete"
        />
      );
    }
    return <ButtonSet>{buttons}</ButtonSet>;
  };
  return (

@ -0,0 +1,117 @@
import { ReactElement, useEffect, useState } from 'react';
import { Link, useSearchParams } from 'react-router-dom';
import {
  Tile,
  // @ts-ignore
} from '@carbon/react';
import HttpService from '../services/HttpService';
import { ProcessModel, ProcessInstance, ProcessGroup } from '../interfaces';
import {
  modifyProcessIdentifierForPathParam,
  truncateString,
} from '../helpers';
import ProcessInstanceRun from './ProcessInstanceRun';

type OwnProps = {
  headerElement?: ReactElement;
  processGroup?: ProcessGroup;
};

export default function ProcessModelListTiles({
  headerElement,
  processGroup,
}: OwnProps) {
  const [searchParams] = useSearchParams();
  const [processModels, setProcessModels] = useState<ProcessModel[] | null>(
    null
  );
  const [processInstance, setProcessInstance] =
    useState<ProcessInstance | null>(null);

  useEffect(() => {
    const setProcessModelsFromResult = (result: any) => {
      setProcessModels(result.results);
    };
    // only allow 10 for now until we get the backend only returning certain models for user execution
    let queryParams = '?per_page=100';
    if (processGroup) {
      queryParams = `${queryParams}&process_group_identifier=${processGroup.id}`;
    }
    HttpService.makeCallToBackend({
      path: `/process-models${queryParams}`,
      successCallback: setProcessModelsFromResult,
    });
  }, [searchParams, processGroup]);

  const processInstanceRunResultTag = () => {
    if (processInstance) {
      return (
        <div className="alert alert-success" role="alert">
          <p>
            Process Instance {processInstance.id} kicked off (
            <Link
              to={`/admin/process-models/${modifyProcessIdentifierForPathParam(
                processInstance.process_model_identifier
              )}/process-instances/${processInstance.id}`}
              data-qa="process-instance-show-link"
            >
              view
            </Link>
            ).
          </p>
        </div>
      );
    }
    return null;
  };

  const processModelsDisplayArea = () => {
    let displayText = null;
    if (processModels && processModels.length > 0) {
      displayText = (processModels || []).map((row: ProcessModel) => {
        return (
          <Tile
            id={`process-model-tile-${row.id}`}
            className="tile-process-group"
            href={`/admin/process-models/${modifyProcessIdentifierForPathParam(
              row.id
            )}`}
          >
            <div className="tile-process-group-content-container">
              <div className="tile-title-top">{row.display_name}</div>
              <p className="tile-description">
                {truncateString(row.description || '', 100)}
              </p>
              <ProcessInstanceRun
                processModel={row}
                onSuccessCallback={setProcessInstance}
                className="tile-pin-bottom"
              />
            </div>
          </Tile>
        );
      });
    } else {
      displayText = <p>No Models To Display</p>;
    }
    return displayText;
  };

  const processModelArea = () => {
    if (processModels && processModels.length > 0) {
      return (
        <>
          {headerElement}
          {processInstanceRunResultTag()}
          {processModelsDisplayArea()}
        </>
      );
    }
    return null;
  };

  if (processModels) {
    return <>{processModelArea()}</>;
  }
  return null;
}

@ -35,7 +35,7 @@ export default function ProcessModelSearch({
        if (processModel) {
          return `${processModel.id} (${truncateString(
            processModel.display_name,
            20
            75
          )})`;
        }
        return null;

@ -41,7 +41,7 @@ export default function ProcessSearch({
        if (process) {
          return `${process.display_name} (${truncateString(
            process.identifier,
            20
            75
          )})`;
        }
        return null;

@ -429,7 +429,7 @@ export default function ReactDiagramEditor({
      fetch(urlToUse)
        .then((response) => response.text())
        .then((text) => {
          const processId = `Proccess_${makeid(7)}`;
          const processId = `Process_${makeid(7)}`;
          const newText = text.replace('{{PROCESS_ID}}', processId);
          setDiagramXMLString(newText);
        })

@ -6,7 +6,7 @@ import PaginationForTable from './PaginationForTable';
import {
  convertSecondsToFormattedDateTime,
  getPageInfoFromSearchParams,
  modifyProcessModelPath,
  modifyProcessIdentifierForPathParam,
} from '../helpers';
import HttpService from '../services/HttpService';
import { PaginationObject } from '../interfaces';
@ -40,9 +40,8 @@ export default function MyOpenProcesses() {
    const rows = tasks.map((row) => {
      const rowToUse = row as any;
      const taskUrl = `/tasks/${rowToUse.process_instance_id}/${rowToUse.task_id}`;
      const modifiedProcessModelIdentifier = modifyProcessModelPath(
        rowToUse.process_model_identifier
      );
      const modifiedProcessModelIdentifier =
        modifyProcessIdentifierForPathParam(rowToUse.process_model_identifier);
      return (
        <tr key={rowToUse.id}>
          <td>

@ -6,7 +6,7 @@ import PaginationForTable from './PaginationForTable';
import {
  convertSecondsToFormattedDateTime,
  getPageInfoFromSearchParams,
  modifyProcessModelPath,
  modifyProcessIdentifierForPathParam,
} from '../helpers';
import HttpService from '../services/HttpService';
import { PaginationObject } from '../interfaces';
@ -39,9 +39,8 @@ export default function TasksWaitingForMe() {
    const rows = tasks.map((row) => {
      const rowToUse = row as any;
      const taskUrl = `/tasks/${rowToUse.process_instance_id}/${rowToUse.task_id}`;
      const modifiedProcessModelIdentifier = modifyProcessModelPath(
        rowToUse.process_model_identifier
      );
      const modifiedProcessModelIdentifier =
        modifyProcessIdentifierForPathParam(rowToUse.process_model_identifier);
      return (
        <tr key={rowToUse.id}>
          <td>

@ -6,7 +6,7 @@ import PaginationForTable from './PaginationForTable';
import {
  convertSecondsToFormattedDateTime,
  getPageInfoFromSearchParams,
  modifyProcessModelPath,
  modifyProcessIdentifierForPathParam,
} from '../helpers';
import HttpService from '../services/HttpService';
import { PaginationObject } from '../interfaces';
@ -40,9 +40,8 @@ export default function TasksForWaitingForMyGroups() {
    const rows = tasks.map((row) => {
      const rowToUse = row as any;
      const taskUrl = `/tasks/${rowToUse.process_instance_id}/${rowToUse.task_id}`;
      const modifiedProcessModelIdentifier = modifyProcessModelPath(
        rowToUse.process_model_identifier
      );
      const modifiedProcessModelIdentifier =
        modifyProcessIdentifierForPathParam(rowToUse.process_model_identifier);
      return (
        <tr key={rowToUse.id}>
          <td>

@ -18,5 +18,6 @@ export const PROCESS_STATUSES = [

// with time: yyyy-MM-dd HH:mm:ss
export const DATE_TIME_FORMAT = 'yyyy-MM-dd HH:mm:ss';
export const TIME_FORMAT_HOURS_MINUTES = 'HH:mm';
export const DATE_FORMAT = 'yyyy-MM-dd';
export const DATE_FORMAT_CARBON = 'Y-m-d';

@ -1,4 +1,4 @@
import { convertSecondsToFormattedDate, slugifyString } from './helpers';
import { convertSecondsToFormattedDateString, slugifyString } from './helpers';

test('it can slugify a string', () => {
  expect(slugifyString('hello---world_ and then Some such-')).toEqual(
@ -7,6 +7,6 @@ test('it can slugify a string', () => {
});

test('it can keep the correct date when converting seconds to date', () => {
  const dateString = convertSecondsToFormattedDate(1666325400);
  const dateString = convertSecondsToFormattedDateString(1666325400);
  expect(dateString).toEqual('2022-10-21');
});

@ -1,5 +1,9 @@
import { format } from 'date-fns';
import { DATE_TIME_FORMAT, DATE_FORMAT } from './config';
import {
  DATE_TIME_FORMAT,
  DATE_FORMAT,
  TIME_FORMAT_HOURS_MINUTES,
} from './config';
import {
  DEFAULT_PER_PAGE,
  DEFAULT_PAGE,
@ -42,27 +46,72 @@ export const convertDateToSeconds = (
  return null;
};

export const convertDateObjectToFormattedString = (dateObject: Date) => {
  if (dateObject) {
    return format(dateObject, DATE_FORMAT);
  }
  return null;
};

export const convertDateAndTimeStringsToDate = (
  dateString: string,
  timeString: string
) => {
  if (dateString && timeString) {
    return new Date(`${dateString}T${timeString}`);
  }
  return null;
};

export const convertDateAndTimeStringsToSeconds = (
  dateString: string,
  timeString: string
) => {
  const dateObject = convertDateAndTimeStringsToDate(dateString, timeString);
  if (dateObject) {
    return convertDateToSeconds(dateObject);
  }
  return null;
};
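convertDateAndTimeStringsToSeconds builds a Date from `${dateString}T${timeString}`, which the browser parses in the local timezone, so the resulting epoch value shifts with the machine's offset. A minimal sketch (assuming convertDateToSeconds, defined above this hunk, rounds date.getTime() / 1000):

// in an environment running at UTC this evaluates to 1666348200
const seconds = convertDateAndTimeStringsToSeconds('2022-10-21', '10:30');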

export const convertStringToDate = (dateString: string) => {
  if (dateString) {
    // add midnight time to the date so it uses the correct date
    // after converting to timezone
    return new Date(`${dateString}T00:10:00`);
  return convertDateAndTimeStringsToSeconds(dateString, '00:10:00');
};

export const convertSecondsToDateObject = (seconds: number) => {
  if (seconds) {
    return new Date(seconds * 1000);
  }
  return null;
};

export const convertSecondsToFormattedDateTime = (seconds: number) => {
  if (seconds) {
    const dateObject = new Date(seconds * 1000);
  const dateObject = convertSecondsToDateObject(seconds);
  if (dateObject) {
    return format(dateObject, DATE_TIME_FORMAT);
  }
  return null;
};

export const convertSecondsToFormattedDate = (seconds: number) => {
  if (seconds) {
    const dateObject = new Date(seconds * 1000);
    return format(dateObject, DATE_FORMAT);
export const convertDateObjectToFormattedHoursMinutes = (dateObject: Date) => {
  if (dateObject) {
    return format(dateObject, TIME_FORMAT_HOURS_MINUTES);
  }
  return null;
};

export const convertSecondsToFormattedTimeHoursMinutes = (seconds: number) => {
  const dateObject = convertSecondsToDateObject(seconds);
  if (dateObject) {
    return convertDateObjectToFormattedHoursMinutes(dateObject);
  }
  return null;
};

export const convertSecondsToFormattedDateString = (seconds: number) => {
  const dateObject = convertSecondsToDateObject(seconds);
  if (dateObject) {
    return convertDateObjectToFormattedString(dateObject);
  }
  return null;
};
@ -125,18 +174,18 @@ export const getProcessModelFullIdentifierFromSearchParams = (
// https://stackoverflow.com/a/71352046/6090676
export const truncateString = (text: string, len: number) => {
  if (text.length > len && text.length > 0) {
    return `${text.split(' ').slice(0, len).join(' ')} ...`;
    return `${text.split('').slice(0, len).join('')} ...`;
  }
  return text;
};
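The change above switches truncateString from word-based to character-based truncation, so len now counts characters rather than words. For example:

truncateString('spiffworkflow', 5); // => 'spiff ...'
truncateString('spiff', 5); // => 'spiff', returned unchanged since it is not longer than len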

// Because of limitations in the way openapi defines parameters, we have to modify process models ids
// which are basically paths to the models
export const modifyProcessModelPath = (path: string) => {
export const modifyProcessIdentifierForPathParam = (path: string) => {
  return path.replace(/\//g, ':') || '';
};

export const unModifyProcessModelPath = (path: string) => {
export const unModifyProcessIdentifierForPathParam = (path: string) => {
  return path.replace(/:/g, '/') || '';
};

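The two helpers are inverses: slashes in a process model identifier become colons so the whole identifier fits in a single path parameter, and colons are turned back into slashes on the way out. With a hypothetical identifier:

modifyProcessIdentifierForPathParam('misc/category/model-a'); // => 'misc:category:model-a'
unModifyProcessIdentifierForPathParam('misc:category:model-a'); // => 'misc/category/model-a'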
@ -148,3 +197,16 @@ export const getGroupFromModifiedModelId = (modifiedId: string) => {
export const splitProcessModelId = (processModelId: string) => {
  return processModelId.split('/');
};

export const refreshAtInterval = (
  interval: number,
  timeout: number,
  func: Function
) => {
  const intervalRef = setInterval(() => func(), interval * 1000);
  const timeoutRef = setTimeout(
    () => clearInterval(intervalRef),
    timeout * 1000
  );
  return [intervalRef, timeoutRef];
};
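refreshAtInterval calls func every interval seconds and stops itself after timeout seconds; it returns both refs so a caller can cancel early. A minimal usage sketch (the polling function is whatever the caller supplies):

// poll every 5 seconds, stop after 10 minutes
const [intervalRef, timeoutRef] = refreshAtInterval(5, 600, getProcessInstances);
// later, e.g. in a useEffect cleanup, cancel both timers:
clearInterval(intervalRef);
clearTimeout(timeoutRef);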

@ -3,6 +3,7 @@ import { useParams } from 'react-router-dom';
export const useUriListForPermissions = () => {
  const params = useParams();
  const targetUris = {
    authenticationListPath: `/v1.0/authentications`,
    messageInstanceListPath: '/v1.0/messages',
    processGroupListPath: '/v1.0/process-groups',
    processGroupShowPath: `/v1.0/process-groups/${params.process_group_id}`,
@ -12,6 +13,7 @@ export const useUriListForPermissions = () => {
    processModelFileCreatePath: `/v1.0/process-models/${params.process_model_id}/files`,
    processModelFileShowPath: `/v1.0/process-models/${params.process_model_id}/files/${params.file_name}`,
    processModelShowPath: `/v1.0/process-models/${params.process_model_id}`,
    secretListPath: `/v1.0/secrets`,
  };

  return { targetUris };

@ -5,21 +5,26 @@
  color: white;
}

h1{
  height: 36px;
  font-family: 'IBM Plex Sans';
  font-style: normal;
/* defaults to 3rem, which isn't long enough for "elizabeth" */
.cds--header__action.username-header-text {
  width: 5rem;
}

h1 {
  font-weight: 400;
  font-size: 28px;
  line-height: 36px;
  color: #161616;
  flex: none;
  order: 0;
  align-self: stretch;
  flex-grow: 0;
  margin-bottom: 1em
}

h2 {
  font-weight: 400;
  font-size: 20px;
  line-height: 28px;
  color: #161616;
}

.span-tag {
  color: black;
}

@ -71,7 +76,7 @@ h1{
}
code {
  font-family: source-code-pro, Menlo, Monaco, Consolas, 'Courier New',
  monospace;
    monospace;
}

.app-logo {
@ -182,9 +187,64 @@ h1.with-icons {
}

/* Json Web Form CSS Fix - Bootstrap now requires that each li have a "list-inline-item." Also have a PR
in on this with the react-jsonschema-form repo. This is just a patch fix to allow date inputs to layout a little more cleanly */
   in on this with the react-jsonschema-form repo. This is just a patch fix to allow date inputs to layout a little more cleanly */
.list-inline>li {
  display: inline-block;
  padding-right: 5px;
  padding-left: 5px;
}

.cds--tile.tile-process-group {
  padding: 0px;
  margin: 16px;
  width: 354px;
  height: 264px;
  background: #F4F4F4;
  order: 1;
  float: left;
}

.tile-process-group-content-container {
  width: 354px;
  height: 264px;
  padding: 1em;
  position: relative;
}

.tile-process-group-display-name {
  margin-top: 2em;
  margin-bottom: 1em;
  font-size: 20px;
  line-height: 28px;
  color: #161616;
  order: 0;
}

.tile-title-top {
  margin-bottom: 2em;
  font-size: 20px;
  line-height: 28px;
  color: #161616;
  order: 0;
}

.tile-description {
  font-size: 14px;
  line-height: 20px;
  letter-spacing: 0.16px;
  color: #161616;
  order: 1;
}

.tile-process-group-children-count {
  font-size: 14px;
  line-height: 20px;
  letter-spacing: 0.16px;
  color: #161616;
  order: 1;
}

.tile-pin-bottom {
  position: absolute;
  bottom: 1em;
}

@ -11,12 +11,6 @@ export interface RecentProcessModel {
  processModelDisplayName: string;
}

export interface ProcessGroup {
  id: string;
  display_name: string;
  description?: string | null;
}

export interface ProcessReference {
  id: string; // The unique id of the process or decision table.
  name: string; // The process or decision Display name.
@ -42,6 +36,16 @@ export interface ProcessFile {
  file_contents?: string;
}

export interface ProcessInstance {
  id: number;
  process_model_identifier: string;
}

export interface ProcessInstanceReport {
  id: string;
  display_name: string;
}

export interface ProcessModel {
  id: string;
  description: string;
@ -50,6 +54,14 @@ export interface ProcessModel {
  files: ProcessFile[];
}

export interface ProcessGroup {
  id: string;
  display_name: string;
  description?: string | null;
  process_models?: ProcessModel[];
  process_groups?: ProcessGroup[];
}

// tuple of display value and URL
export type HotCrumbItem = [displayValue: string, url?: string];

@ -95,3 +107,11 @@ export interface PermissionCheckResult {
export interface PermissionCheckResponseBody {
  results: PermissionCheckResult;
}

export interface FormField {
  id: string;
  title: string;
  required: boolean;
  type: string;
  enum: string[];
}
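A hypothetical FormField value, to make the shape concrete for the form builder further below:

const favoriteColorField: FormField = {
  id: 'favorite-color',
  title: 'Favorite Color',
  required: false,
  type: 'select',
  enum: ['red', 'green', 'blue'],
};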

@ -21,6 +21,7 @@ import ErrorContext from '../contexts/ErrorContext';
import ProcessInstanceLogList from './ProcessInstanceLogList';
import MessageInstanceList from './MessageInstanceList';
import Configuration from './Configuration';
import JsonSchemaFormBuilder from './JsonSchemaFormBuilder';

export default function AdminRoutes() {
  const location = useLocation();
@ -108,6 +109,10 @@ export default function AdminRoutes() {
      <Route path="process-instances" element={<ProcessInstanceList />} />
      <Route path="messages" element={<MessageInstanceList />} />
      <Route path="configuration/*" element={<Configuration />} />
      <Route
        path="process-models/:process_model_id/form-builder"
        element={<JsonSchemaFormBuilder />}
      />
    </Routes>
  );
}

@ -2,11 +2,15 @@ import { useContext, useEffect, useState } from 'react';
import { Route, Routes, useLocation, useNavigate } from 'react-router-dom';
// @ts-ignore
import { Tabs, TabList, Tab } from '@carbon/react';
import { Can } from '@casl/react';
import ErrorContext from '../contexts/ErrorContext';
import SecretList from './SecretList';
import SecretNew from './SecretNew';
import SecretShow from './SecretShow';
import AuthenticationList from './AuthenticationList';
import { useUriListForPermissions } from '../hooks/UriListForPermissions';
import { PermissionsToCheck } from '../interfaces';
import { usePermissionFetcher } from '../hooks/PermissionService';

export default function Configuration() {
  const location = useLocation();
@ -14,6 +18,13 @@ export default function Configuration() {
  const [selectedTabIndex, setSelectedTabIndex] = useState<number>(0);
  const navigate = useNavigate();

  const { targetUris } = useUriListForPermissions();
  const permissionRequestData: PermissionsToCheck = {
    [targetUris.authenticationListPath]: ['GET'],
    [targetUris.secretListPath]: ['GET'],
  };
  const { ability } = usePermissionFetcher(permissionRequestData);

  useEffect(() => {
    setErrorMessage(null);
    let newSelectedTabIndex = 0;
@ -27,12 +38,18 @@ export default function Configuration() {
    <>
      <Tabs selectedIndex={selectedTabIndex}>
        <TabList aria-label="List of tabs">
          <Tab onClick={() => navigate('/admin/configuration/secrets')}>
            Secrets
          </Tab>
          <Tab onClick={() => navigate('/admin/configuration/authentications')}>
            Authentications
          </Tab>
          <Can I="GET" a={targetUris.secretListPath} ability={ability}>
            <Tab onClick={() => navigate('/admin/configuration/secrets')}>
              Secrets
            </Tab>
          </Can>
          <Can I="GET" a={targetUris.authenticationListPath} ability={ability}>
            <Tab
              onClick={() => navigate('/admin/configuration/authentications')}
            >
              Authentications
            </Tab>
          </Can>
        </TabList>
      </Tabs>
      <br />

@ -0,0 +1,9 @@
import ProcessModelListTiles from '../components/ProcessModelListTiles';

export default function CreateNewInstance() {
  return (
    <ProcessModelListTiles
      headerElement={<h1>Process models available to you</h1>}
    />
  );
}

@ -7,6 +7,7 @@ import ErrorContext from '../contexts/ErrorContext';
import MyTasks from './MyTasks';
import GroupedTasks from './GroupedTasks';
import CompletedInstances from './CompletedInstances';
import CreateNewInstance from './CreateNewInstance';

export default function HomePageRoutes() {
  const location = useLocation();
@ -21,28 +22,45 @@ export default function HomePageRoutes() {
      newSelectedTabIndex = 1;
    } else if (location.pathname.match(/^\/tasks\/completed-instances\b/)) {
      newSelectedTabIndex = 2;
    } else if (location.pathname.match(/^\/tasks\/create-new-instance\b/)) {
      newSelectedTabIndex = 3;
    }
    setSelectedTabIndex(newSelectedTabIndex);
  }, [location, setErrorMessage]);

  const renderTabs = () => {
    if (location.pathname.match(/^\/tasks\/\d+\/\b/)) {
      return null;
    }
    return (
      <>
        <Tabs selectedIndex={selectedTabIndex}>
          <TabList aria-label="List of tabs">
            <Tab onClick={() => navigate('/tasks/my-tasks')}>My Tasks</Tab>
            <Tab onClick={() => navigate('/tasks/grouped')}>Grouped Tasks</Tab>
            <Tab onClick={() => navigate('/tasks/completed-instances')}>
              Completed Instances
            </Tab>
            <Tab onClick={() => navigate('/tasks/create-new-instance')}>
              Create New Instance +
            </Tab>
          </TabList>
        </Tabs>
        <br />
      </>
    );
  };

  return (
    <>
      <Tabs selectedIndex={selectedTabIndex}>
        <TabList aria-label="List of tabs">
          <Tab onClick={() => navigate('/tasks/my-tasks')}>My Tasks</Tab>
          <Tab onClick={() => navigate('/tasks/grouped')}>Grouped Tasks</Tab>
          <Tab onClick={() => navigate('/tasks/completed-instances')}>
            Completed Instances
          </Tab>
        </TabList>
      </Tabs>
      <br />
      {renderTabs()}
      <Routes>
        <Route path="/" element={<MyTasks />} />
        <Route path="my-tasks" element={<MyTasks />} />
        <Route path=":process_instance_id/:task_id" element={<TaskShow />} />
        <Route path="grouped" element={<GroupedTasks />} />
        <Route path="completed-instances" element={<CompletedInstances />} />
        <Route path="create-new-instance" element={<CreateNewInstance />} />
      </Routes>
    </>
  );

@ -0,0 +1,250 @@
import { useEffect, useState } from 'react';
// @ts-ignore
import { Button, Select, SelectItem, TextInput } from '@carbon/react';
import { useParams } from 'react-router-dom';
import { FormField } from '../interfaces';
import { modifyProcessIdentifierForPathParam, slugifyString } from '../helpers';
import HttpService from '../services/HttpService';

export default function JsonSchemaFormBuilder() {
  const params = useParams();
  const formFieldTypes = ['textbox', 'checkbox', 'select'];

  const [formTitle, setFormTitle] = useState<string>('');
  const [formDescription, setFormDescription] = useState<string>('');
  const [formId, setFormId] = useState<string>('');
  const [formFields, setFormFields] = useState<FormField[]>([]);
  const [showNewFormField, setShowNewFormField] = useState<boolean>(false);
  const [formFieldSelectOptions, setFormFieldSelectOptions] =
    useState<string>('');
  const [formIdHasBeenUpdatedByUser, setFormIdHasBeenUpdatedByUser] =
    useState<boolean>(false);
  const [formFieldIdHasBeenUpdatedByUser, setFormFieldIdHasBeenUpdatedByUser] =
    useState<boolean>(false);
  const [showFormFieldSelectTextField, setShowFormFieldSelectTextField] =
    useState<boolean>(false);

  const [formFieldId, setFormFieldId] = useState<string>('');
  const [formFieldTitle, setFormFieldTitle] = useState<string>('');
  const [formFieldType, setFormFieldType] = useState<string>('');

  const modifiedProcessModelId = modifyProcessIdentifierForPathParam(
    `${params.process_model_id}`
  );

  useEffect(() => {}, []);

  const renderFormJson = () => {
    const formJson = {
      title: formTitle,
      description: formDescription,
      properties: {},
      required: [],
    };

    formFields.forEach((formField: FormField) => {
      let jsonSchemaFieldType = 'string';
      if (formField.type === 'checkbox') {
        jsonSchemaFieldType = 'boolean';
      }
      const formJsonObject: any = {
        type: jsonSchemaFieldType,
        title: formField.title,
      };

      if (formField.type === 'select') {
        formJsonObject.enum = formField.enum;
      }
      (formJson.properties as any)[formField.id] = formJsonObject;
    });

    return JSON.stringify(formJson, null, 2);
  };
|
||||
const renderFormUiJson = () => {
|
||||
const uiOrder = formFields.map((formField: FormField) => {
|
||||
return formField.id;
|
||||
});
|
||||
return JSON.stringify({ 'ui:order': uiOrder }, null, 2);
|
||||
};
|
||||
|
||||
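  // Worked example (editor illustration, not part of the commit): with two
  // hypothetical fields
  //   { id: 'first_name', title: 'First Name', type: 'textbox' }
  //   { id: 'favorite_color', title: 'Favorite Color', type: 'select',
  //     enum: ['red', 'green'] }
  // renderFormJson() above returns the JSON Schema
  //   {
  //     "title": "...",
  //     "description": "...",
  //     "properties": {
  //       "first_name": { "type": "string", "title": "First Name" },
  //       "favorite_color": { "type": "string", "title": "Favorite Color",
  //                           "enum": ["red", "green"] }
  //     },
  //     "required": []
  //   }
  // and renderFormUiJson() returns { "ui:order": ["first_name", "favorite_color"] }.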
  const onFormFieldTitleChange = (newFormFieldTitle: string) => {
    console.log('newFormFieldTitle', newFormFieldTitle);
    console.log(
      'setFormFieldIdHasBeenUpdatedByUser',
      formFieldIdHasBeenUpdatedByUser
    );
    if (!formFieldIdHasBeenUpdatedByUser) {
      setFormFieldId(slugifyString(newFormFieldTitle));
    }
    setFormFieldTitle(newFormFieldTitle);
  };

  const onFormTitleChange = (newFormTitle: string) => {
    if (!formIdHasBeenUpdatedByUser) {
      setFormId(slugifyString(newFormTitle));
    }
    setFormTitle(newFormTitle);
  };
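  // Hypothetical sketch of slugifyString (imported from '../helpers' above);
  // the committed helper may differ. Assumed behavior, inferred only from its
  // use here for deriving ids from titles: lower-case and hyphen-separate.
  //   slugifyStringSketch('First Name') => 'first-name'
  const slugifyStringSketch = (value: string): string =>
    value
      .toLowerCase()
      .trim()
      .replace(/[^a-z0-9]+/g, '-')
      .replace(/^-+|-+$/g, '');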
  const addFormField = () => {
    const newFormField: FormField = {
      id: formFieldId,
      title: formFieldTitle,
      required: false,
      type: formFieldType,
      enum: formFieldSelectOptions.split(','),
    };

    setFormFieldIdHasBeenUpdatedByUser(false);
    setShowNewFormField(false);
    setFormFields([...formFields, newFormField]);
  };

  const handleFormFieldTypeChange = (event: any) => {
    setFormFieldType(event.srcElement.value);

    if (event.srcElement.value === 'select') {
      setShowFormFieldSelectTextField(true);
    } else {
      setShowFormFieldSelectTextField(false);
    }
  };

  const newFormFieldComponent = () => {
    if (showNewFormField) {
      return (
        <>
          <TextInput
            id="form-field-title"
            name="title"
            labelText="Title"
            value={formFieldTitle}
            onChange={(event: any) => {
              onFormFieldTitleChange(event.srcElement.value);
            }}
          />
          <TextInput
            id="json-form-field-id"
            name="id"
            labelText="ID"
            value={formFieldId}
            onChange={(event: any) => {
              setFormFieldIdHasBeenUpdatedByUser(true);
              setFormFieldId(event.srcElement.value);
            }}
          />
          <Select
            id="form-field-type"
            labelText="Type"
            onChange={handleFormFieldTypeChange}
          >
            {formFieldTypes.map((fft: string) => {
              return <SelectItem text={fft} value={fft} />;
            })}
          </Select>
          {showFormFieldSelectTextField ? (
            <TextInput
              id="json-form-field-select-options"
              name="select-options"
              labelText="Select Options"
              onChange={(event: any) => {
                setFormFieldSelectOptions(event.srcElement.value);
              }}
            />
          ) : null}
          <Button onClick={addFormField}>Add Field</Button>
        </>
      );
    }
    return null;
  };

  const formFieldArea = () => {
    if (formFields.length > 0) {
      return formFields.map((formField: FormField) => {
        return <p>Form Field: {formField.id}</p>;
      });
    }
    return null;
  };

  const handleSaveCallback = (result: any) => {
    console.log('result', result);
  };

  const uploadFile = (file: File) => {
    const url = `/process-models/${modifiedProcessModelId}/files`;
    const httpMethod = 'POST';
    const formData = new FormData();
    formData.append('file', file);
    formData.append('fileName', file.name);

    HttpService.makeCallToBackend({
      path: url,
      successCallback: handleSaveCallback,
      httpMethod,
      postBody: formData,
    });
  };

  const saveFile = () => {
    const formJsonFileName = `${formId}-schema.json`;
    const formUiJsonFileName = `${formId}-uischema.json`;

    uploadFile(new File([renderFormJson()], formJsonFileName));
    uploadFile(new File([renderFormUiJson()], formUiJsonFileName));
  };

  const jsonFormArea = () => {
    return (
      <>
        <Button onClick={saveFile}>Save</Button>
        <TextInput
          id="json-form-title"
          name="title"
          labelText="Title"
          value={formTitle}
          onChange={(event: any) => {
            onFormTitleChange(event.srcElement.value);
          }}
        />
        <TextInput
          id="json-form-id"
          name="id"
          labelText="ID"
          value={formId}
          onChange={(event: any) => {
            setFormIdHasBeenUpdatedByUser(true);
            setFormId(event.srcElement.value);
          }}
        />
        <TextInput
          id="form-description"
          name="description"
          labelText="Description"
          value={formDescription}
          onChange={(event: any) => {
            setFormDescription(event.srcElement.value);
          }}
        />
        <Button
          onClick={() => {
            setFormFieldId('');
            setFormFieldTitle('');
            setFormFieldType('');
            setFormFieldSelectOptions('');
            setShowFormFieldSelectTextField(false);
            setShowNewFormField(true);
          }}
        >
          New Field
        </Button>
        {formFieldArea()}
        {newFormFieldComponent()}
      </>
    );
  };

  return <>{jsonFormArea()}</>;
}
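For orientation, a hedged sketch of what uploadFile() in the component above amounts to when written with plain fetch instead of the project's HttpService wrapper. BACKEND_BASE_URL is a placeholder; HttpService is assumed to prepend the real backend base path:

// Assumption-laden sketch, not the committed implementation.
const BACKEND_BASE_URL = 'http://localhost:7000'; // placeholder
async function uploadFileSketch(modifiedProcessModelId: string, file: File) {
  const formData = new FormData();
  formData.append('file', file);
  formData.append('fileName', file.name);
  const response = await fetch(
    `${BACKEND_BASE_URL}/process-models/${modifiedProcessModelId}/files`,
    { method: 'POST', body: formData }
  );
  return response.json();
}

So saving a form whose id is my-form uploads two files, my-form-schema.json and my-form-uischema.json, one per call to uploadFile().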
@@ -5,10 +5,10 @@ import { Link, useParams, useSearchParams } from 'react-router-dom';
import PaginationForTable from '../components/PaginationForTable';
import ProcessBreadcrumb from '../components/ProcessBreadcrumb';
import {
- convertSecondsToFormattedDate,
+ convertSecondsToFormattedDateString,
  getPageInfoFromSearchParams,
- modifyProcessModelPath,
- unModifyProcessModelPath,
+ modifyProcessIdentifierForPathParam,
+ unModifyProcessIdentifierForPathParam,
} from '../helpers';
import HttpService from '../services/HttpService';

@@ -46,7 +46,7 @@ export default function MessageInstanceList() {
      <td>
        <Link
          data-qa="process-model-show-link"
-         to={`/admin/process-models/${modifyProcessModelPath(
+         to={`/admin/process-models/${modifyProcessIdentifierForPathParam(
            rowToUse.process_model_identifier
          )}`}
        >

@@ -56,7 +56,7 @@ export default function MessageInstanceList() {
      <td>
        <Link
          data-qa="process-instance-show-link"
-         to={`/admin/process-models/${modifyProcessModelPath(
+         to={`/admin/process-models/${modifyProcessIdentifierForPathParam(
            rowToUse.process_model_identifier
          )}/process-instances/${rowToUse.process_instance_id}`}
        >

@@ -68,7 +68,9 @@ export default function MessageInstanceList() {
      <td>{rowToUse.failure_cause || '-'}</td>
      <td>{rowToUse.status}</td>
      <td>
-       {convertSecondsToFormattedDate(rowToUse.created_at_in_seconds)}
+       {convertSecondsToFormattedDateString(
+         rowToUse.created_at_in_seconds
+       )}
      </td>
    </tr>
  );

@@ -102,7 +104,7 @@ export default function MessageInstanceList() {
    ['Process Groups', '/admin'],
    [
      `Process Model: ${params.process_model_id}`,
-     `process_model:${unModifyProcessModelPath(
+     `process_model:${unModifyProcessIdentifierForPathParam(
        searchParams.get('process_model_id') || ''
      )}:link`,
    ],
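Besides the path-helper renames, the visible change in MessageInstanceList swaps convertSecondsToFormattedDate for convertSecondsToFormattedDateString. A hypothetical sketch of such a helper, assuming created_at_in_seconds is a Unix epoch in seconds; the committed src/helpers version may format differently:

// Hedged sketch, not the committed helper.
function convertSecondsToFormattedDateStringSketch(seconds: number): string {
  // e.g. 1970-01-01T00:00:00.000Z -> '1970-01-01 00:00:00'
  return new Date(seconds * 1000).toISOString().replace('T', ' ').slice(0, 19);
}

convertSecondsToFormattedDateStringSketch(0); // => '1970-01-01 00:00:00'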
@@ -5,12 +5,15 @@ import { Link, useSearchParams } from 'react-router-dom';
import PaginationForTable from '../components/PaginationForTable';
import {
  getPageInfoFromSearchParams,
- modifyProcessModelPath,
+ modifyProcessIdentifierForPathParam,
+ refreshAtInterval,
} from '../helpers';
import HttpService from '../services/HttpService';
import { PaginationObject, RecentProcessModel } from '../interfaces';

const PER_PAGE_FOR_TASKS_ON_HOME_PAGE = 5;
+const REFRESH_INTERVAL = 10;
+const REFRESH_TIMEOUT = 600;

export default function MyTasks() {
  const [searchParams] = useSearchParams();

@@ -18,18 +21,23 @@ export default function MyTasks() {
  const [pagination, setPagination] = useState<PaginationObject | null>(null);

  useEffect(() => {
-   const { page, perPage } = getPageInfoFromSearchParams(
-     searchParams,
-     PER_PAGE_FOR_TASKS_ON_HOME_PAGE
-   );
-   const setTasksFromResult = (result: any) => {
-     setTasks(result.results);
-     setPagination(result.pagination);
+   const getTasks = () => {
+     const { page, perPage } = getPageInfoFromSearchParams(
+       searchParams,
+       PER_PAGE_FOR_TASKS_ON_HOME_PAGE
+     );
+     const setTasksFromResult = (result: any) => {
+       setTasks(result.results);
+       setPagination(result.pagination);
+     };
+     HttpService.makeCallToBackend({
+       path: `/tasks?per_page=${perPage}&page=${page}`,
+       successCallback: setTasksFromResult,
+     });
    };
-   HttpService.makeCallToBackend({
-     path: `/tasks?per_page=${perPage}&page=${page}`,
-     successCallback: setTasksFromResult,
-   });
+   getTasks();
+   refreshAtInterval(REFRESH_INTERVAL, REFRESH_TIMEOUT, getTasks);
  }, [searchParams]);

  let recentProcessModels: RecentProcessModel[] = [];

@@ -42,9 +50,8 @@ export default function MyTasks() {
  const rows = tasks.map((row) => {
    const rowToUse = row as any;
    const taskUrl = `/tasks/${rowToUse.process_instance_id}/${rowToUse.id}`;
-   const modifiedProcessModelIdentifier = modifyProcessModelPath(
-     rowToUse.process_model_identifier
-   );
+   const modifiedProcessModelIdentifier =
+     modifyProcessIdentifierForPathParam(rowToUse.process_model_identifier);
    return (
      <tr key={rowToUse.id}>
        <td>

@@ -102,7 +109,7 @@ export default function MyTasks() {
  const buildRecentProcessModelSection = () => {
    const rows = recentProcessModels.map((row) => {
      const rowToUse = row as any;
-     const modifiedProcessModelId = modifyProcessModelPath(
+     const modifiedProcessModelId = modifyProcessIdentifierForPathParam(
        rowToUse.processModelIdentifier
      );
      return (
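The MyTasks refactor wraps the fetch in getTasks() and registers it with refreshAtInterval(REFRESH_INTERVAL, REFRESH_TIMEOUT, getTasks), that is, re-poll every 10 seconds and stop after 600. A hypothetical sketch of that helper's assumed semantics; the committed src/helpers version may differ, for example by also returning a cleanup function:

// Hedged sketch of the imported refreshAtInterval helper.
function refreshAtIntervalSketch(
  intervalSeconds: number,
  timeoutSeconds: number,
  callback: () => void
): () => void {
  const intervalId = setInterval(callback, intervalSeconds * 1000);
  const timeoutId = setTimeout(
    () => clearInterval(intervalId),
    timeoutSeconds * 1000
  );
  // cleanup suitable for a useEffect return value
  return () => {
    clearInterval(intervalId);
    clearTimeout(timeoutId);
  };
}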
@@ -1,35 +1,23 @@
import { useEffect, useState } from 'react';
-import { Link, useNavigate, useSearchParams } from 'react-router-dom';
+import { useNavigate, useSearchParams } from 'react-router-dom';
import {
  Button,
  Table,
  // @ts-ignore
} from '@carbon/react';
import { Can } from '@casl/react';
import ProcessBreadcrumb from '../components/ProcessBreadcrumb';
import PaginationForTable from '../components/PaginationForTable';
import HttpService from '../services/HttpService';
-import {
- getPageInfoFromSearchParams,
- modifyProcessModelPath,
-} from '../helpers';
-import {
- CarbonComboBoxSelection,
- PermissionsToCheck,
- ProcessGroup,
-} from '../interfaces';
-import ProcessModelSearch from '../components/ProcessModelSearch';
+import { modifyProcessIdentifierForPathParam } from '../helpers';
+import { CarbonComboBoxSelection, PermissionsToCheck } from '../interfaces';
import { useUriListForPermissions } from '../hooks/UriListForPermissions';
import { usePermissionFetcher } from '../hooks/PermissionService';
+import ProcessModelSearch from '../components/ProcessModelSearch';
+import ProcessGroupListTiles from '../components/ProcessGroupListTiles';

// Example process group json
// {'process_group_id': 'sure', 'display_name': 'Test Workflows', 'id': 'test_process_group'}
export default function ProcessGroupList() {
- const navigate = useNavigate();
  const [searchParams] = useSearchParams();
+ const navigate = useNavigate();

- const [processGroups, setProcessGroups] = useState([]);
- const [pagination, setPagination] = useState(null);
  const [processModelAvailableItems, setProcessModelAvailableItems] = useState(
    []
  );

@@ -41,10 +29,6 @@ export default function ProcessGroupList() {
  const { ability } = usePermissionFetcher(permissionRequestData);

  useEffect(() => {
-   const setProcessGroupsFromResult = (result: any) => {
-     setProcessGroups(result.results);
-     setPagination(result.pagination);
-   };
    const processResultForProcessModels = (result: any) => {
      const selectionArray = result.results.map((item: any) => {
        const label = `${item.id}`;

@@ -53,73 +37,20 @@ export default function ProcessGroupList() {
      });
      setProcessModelAvailableItems(selectionArray);
    };

-   const { page, perPage } = getPageInfoFromSearchParams(searchParams);
-   // for browsing
-   HttpService.makeCallToBackend({
-     path: `/process-groups?per_page=${perPage}&page=${page}`,
-     successCallback: setProcessGroupsFromResult,
-   });
    // for search box
    HttpService.makeCallToBackend({
-     path: `/process-models?per_page=1000`,
+     path: `/process-models?per_page=1000&recursive=true`,
      successCallback: processResultForProcessModels,
    });
  }, [searchParams]);

- const buildTable = () => {
-   const rows = processGroups.map((row: ProcessGroup) => {
-     return (
-       <tr key={(row as any).id}>
-         <td>
-           <Link
-             to={`/admin/process-groups/${(row as any).id}`}
-             title={(row as any).id}
-           >
-             {(row as any).display_name}
-           </Link>
-         </td>
-       </tr>
-     );
-   });
-   return (
-     <Table striped bordered>
-       <thead>
-         <tr>
-           <th>Process Group</th>
-         </tr>
-       </thead>
-       <tbody>{rows}</tbody>
-     </Table>
-   );
- };
-
- const processGroupsDisplayArea = () => {
-   const { page, perPage } = getPageInfoFromSearchParams(searchParams);
-   let displayText = null;
-   if (processGroups?.length > 0) {
-     displayText = (
-       <>
-         <h3>Browse</h3>
-         <PaginationForTable
-           page={page}
-           perPage={perPage}
-           pagination={pagination as any}
-           tableToDisplay={buildTable()}
-         />
-       </>
-     );
-   } else {
-     displayText = <p>No Groups To Display</p>;
-   }
-   return displayText;
- };
-
  const processModelSearchArea = () => {
    const processModelSearchOnChange = (selection: CarbonComboBoxSelection) => {
      const processModel = selection.selectedItem;
      navigate(
-       `/admin/process-models/${modifyProcessModelPath(processModel.id)}`
+       `/admin/process-models/${modifyProcessIdentifierForPathParam(
+         processModel.id
+       )}`
      );
    };
    return (

@@ -131,7 +62,7 @@ export default function ProcessGroupList() {
    );
  };

- if (pagination) {
+ if (processModelAvailableItems) {
    return (
      <>
        <ProcessBreadcrumb hotCrumbs={[['Process Groups']]} />

@@ -142,9 +73,10 @@ export default function ProcessGroupList() {
        <br />
        <br />
      </Can>
      <br />
      {processModelSearchArea()}
      <br />
-     {processGroupsDisplayArea()}
+     <ProcessGroupListTiles />
    </>
  );
}
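Every rename in these pages follows the same pattern: modifyProcessModelPath becomes modifyProcessIdentifierForPathParam. A hedged guess at what the helper does, inferred only from its use here (a process model identifier such as group/model must fit into a single URL path segment) and from breadcrumb strings like `process_model:...:link`; the real src/helpers implementation may differ:

// Hypothetical sketch, inferred behavior only.
function modifyProcessIdentifierForPathParamSketch(id: string): string {
  return id.replace(/\//g, ':'); // 'examples/ticket' => 'examples:ticket'
}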
@@ -8,8 +8,8 @@ import PaginationForTable from '../components/PaginationForTable';
import HttpService from '../services/HttpService';
import {
  getPageInfoFromSearchParams,
- modifyProcessModelPath,
- unModifyProcessModelPath,
+ modifyProcessIdentifierForPathParam,
+ unModifyProcessIdentifierForPathParam,
} from '../helpers';
import {
  PaginationObject,

@@ -19,6 +19,8 @@ import {
} from '../interfaces';
import { useUriListForPermissions } from '../hooks/UriListForPermissions';
import { usePermissionFetcher } from '../hooks/PermissionService';
+import ProcessGroupListTiles from '../components/ProcessGroupListTiles';
+// import ProcessModelListTiles from '../components/ProcessModelListTiles';

export default function ProcessGroupShow() {
  const params = useParams();

@@ -26,11 +28,8 @@ export default function ProcessGroupShow() {

  const [processGroup, setProcessGroup] = useState<ProcessGroup | null>(null);
  const [processModels, setProcessModels] = useState([]);
- const [processGroups, setProcessGroups] = useState([]);
  const [modelPagination, setModelPagination] =
    useState<PaginationObject | null>(null);
- const [groupPagination, setGroupPagination] =
-   useState<PaginationObject | null>(null);

  const { targetUris } = useUriListForPermissions();
  const permissionRequestData: PermissionsToCheck = {

@@ -47,23 +46,15 @@ export default function ProcessGroupShow() {
      setProcessModels(result.results);
      setModelPagination(result.pagination);
    };
-   const setProcessGroupFromResult = (result: any) => {
-     setProcessGroups(result.results);
-     setGroupPagination(result.pagination);
-   };
    const processResult = (result: any) => {
      setProcessGroup(result);
-     const unmodifiedProcessGroupId = unModifyProcessModelPath(
+     const unmodifiedProcessGroupId = unModifyProcessIdentifierForPathParam(
        (params as any).process_group_id
      );
      HttpService.makeCallToBackend({
        path: `/process-models?process_group_identifier=${unmodifiedProcessGroupId}&per_page=${perPage}&page=${page}`,
        successCallback: setProcessModelFromResult,
      });
-     HttpService.makeCallToBackend({
-       path: `/process-groups?process_group_identifier=${unmodifiedProcessGroupId}&per_page=${perPage}&page=${page}`,
-       successCallback: setProcessGroupFromResult,
-     });
    };
    HttpService.makeCallToBackend({
      path: `/process-groups/${params.process_group_id}`,

@@ -76,9 +67,8 @@ export default function ProcessGroupShow() {
      return null;
    }
    const rows = processModels.map((row: ProcessModel) => {
-     const modifiedProcessModelId: String = modifyProcessModelPath(
-       (row as any).id
-     );
+     const modifiedProcessModelId: String =
+       modifyProcessIdentifierForPathParam((row as any).id);
      return (
        <tr key={row.id}>
          <td>

@@ -95,7 +85,7 @@ export default function ProcessGroupShow() {
    });
    return (
      <div>
-       <h3>Process Models</h3>
+       <h2>Process Models</h2>
        <Table striped bordered>
          <thead>
            <tr>

@@ -109,45 +99,11 @@ export default function ProcessGroupShow() {
    );
  };

- const buildGroupTable = () => {
-   if (processGroup === null) {
-     return null;
-   }
-   const rows = processGroups.map((row: ProcessGroup) => {
-     const modifiedProcessGroupId: String = modifyProcessModelPath(row.id);
-     return (
-       <tr key={row.id}>
-         <td>
-           <Link
-             to={`/admin/process-groups/${modifiedProcessGroupId}`}
-             data-qa="process-model-show-link"
-           >
-             {row.id}
-           </Link>
-         </td>
-         <td>{row.display_name}</td>
-       </tr>
-     );
-   });
-   return (
-     <div>
-       <h3>Process Groups</h3>
-       <Table striped bordered>
-         <thead>
-           <tr>
-             <th>Process Group Id</th>
-             <th>Display Name</th>
-           </tr>
-         </thead>
-         <tbody>{rows}</tbody>
-       </Table>
-     </div>
-   );
- };
-
- if (processGroup && groupPagination && modelPagination) {
+ if (processGroup && modelPagination) {
    const { page, perPage } = getPageInfoFromSearchParams(searchParams);
-   const modifiedProcessGroupId = modifyProcessModelPath(processGroup.id);
+   const modifiedProcessGroupId = modifyProcessIdentifierForPathParam(
+     processGroup.id
+   );
    return (
      <>
        <ProcessBreadcrumb

@@ -157,6 +113,7 @@ export default function ProcessGroupShow() {
          ]}
        />
        <h1>Process Group: {processGroup.display_name}</h1>
+       <p className="process-description">{processGroup.description}</p>
        <ul>
          <Stack orientation="horizontal" gap={3}>
            <Can I="POST" a={targetUris.processGroupListPath} ability={ability}>

@@ -187,6 +144,10 @@ export default function ProcessGroupShow() {
          </Stack>
          <br />
          <br />
+         {/* <ProcessModelListTiles
+           headerElement={<h2>Process Models</h2>}
+           processGroup={processGroup}
+         /> */}
          {/* eslint-disable-next-line sonarjs/no-gratuitous-expressions */}
          {modelPagination && modelPagination.total > 0 && (
            <PaginationForTable

@@ -198,15 +159,10 @@ export default function ProcessGroupShow() {
          )}
          <br />
          <br />
-         {/* eslint-disable-next-line sonarjs/no-gratuitous-expressions */}
-         {groupPagination && groupPagination.total > 0 && (
-           <PaginationForTable
-             page={page}
-             perPage={perPage}
-             pagination={groupPagination}
-             tableToDisplay={buildGroupTable()}
-           />
-         )}
+         <ProcessGroupListTiles
+           processGroup={processGroup}
+           headerElement={<h2>Process Groups</h2>}
+         />
        </ul>
      </>
    );
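ProcessGroupShow applies the inverse helper, unModifyProcessIdentifierForPathParam, to turn the path param back into a real group identifier before querying the backend. The companion sketch, under the same separator assumption as above; not the committed implementation:

// Hypothetical inverse of the sketch above; round-trips with it.
function unModifyProcessIdentifierForPathParamSketch(pathParam: string): string {
  return pathParam.replace(/:/g, '/'); // 'examples:ticket' => 'examples/ticket'
}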