started migrating code for updating task form data w/ burnettk
commit e57a64361c (parent 405277d323)

@@ -1,3 +1,5 @@
+from __future__ import with_statement
+
 import logging
 from logging.config import fileConfig
 

@@ -1,8 +1,8 @@
 """empty message
 
-Revision ID: 2cbb6d60f0ac
+Revision ID: 86509963c525
 Revises:
-Create Date: 2022-06-30 10:45:49.832257
+Create Date: 2022-06-30 11:55:54.677991
 
 """
 from alembic import op
@@ -10,7 +10,7 @@ import sqlalchemy as sa
 
 
 # revision identifiers, used by Alembic.
-revision = '2cbb6d60f0ac'
+revision = '86509963c525'
 down_revision = None
 branch_labels = None
 depends_on = None
@@ -92,7 +92,7 @@ def upgrade():
     )
     op.create_table('active_task',
     sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('task_id', sa.String(length=50), nullable=False),
+    sa.Column('spiffworkflow_task_id', sa.String(length=50), nullable=False),
     sa.Column('process_instance_id', sa.Integer(), nullable=False),
     sa.Column('assigned_principal_id', sa.Integer(), nullable=True),
     sa.Column('process_instance_data', sa.Text(), nullable=True),
@@ -103,7 +103,7 @@ def upgrade():
     sa.ForeignKeyConstraint(['assigned_principal_id'], ['principal.id'], ),
     sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
     sa.PrimaryKeyConstraint('id'),
-    sa.UniqueConstraint('task_id', 'process_instance_id', name='active_task_unique')
+    sa.UniqueConstraint('spiffworkflow_task_id', 'process_instance_id', name='active_task_unique')
     )
     op.create_table('file',
     sa.Column('id', sa.Integer(), nullable=False),

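Since the commit regenerates the initial Alembic revision (2cbb6d60f0ac becomes 86509963c525) rather than adding a follow-up revision, the task_id rename only exists inside that regenerated file. Purely as a reference sketch, the same rename could also be expressed as an incremental migration along these lines; the revision identifier below is made up and this file is not part of the commit:

"""Sketch only: rename active_task.task_id to spiffworkflow_task_id."""
import sqlalchemy as sa
from alembic import op

# Hypothetical identifiers, not real revisions in this repository.
revision = "000000000000"
down_revision = "2cbb6d60f0ac"
branch_labels = None
depends_on = None


def upgrade():
    # Drop the old composite unique constraint, rename the column, re-add the constraint.
    op.drop_constraint("active_task_unique", "active_task", type_="unique")
    op.alter_column(
        "active_task",
        "task_id",
        new_column_name="spiffworkflow_task_id",
        existing_type=sa.String(length=50),
        existing_nullable=False,
    )
    op.create_unique_constraint(
        "active_task_unique",
        "active_task",
        ["spiffworkflow_task_id", "process_instance_id"],
    )


def downgrade():
    # Mirror image of upgrade().
    op.drop_constraint("active_task_unique", "active_task", type_="unique")
    op.alter_column(
        "active_task",
        "spiffworkflow_task_id",
        new_column_name="task_id",
        existing_type=sa.String(length=50),
        existing_nullable=False,
    )
    op.create_unique_constraint(
        "active_task_unique", "active_task", ["task_id", "process_instance_id"]
    )

Regenerating the initial revision is the simpler route while the schema is still unreleased; the incremental form would only matter once databases exist that already ran 2cbb6d60f0ac.
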
@@ -598,7 +598,7 @@ description = "Flask Bpmn"
 category = "main"
 optional = false
 python-versions = "^3.7"
-develop = true
+develop = false
 
 [package.dependencies]
 click = "^8.0.1"
@@ -612,12 +612,14 @@ flask-migrate = "*"
 flask-restful = "*"
 sentry-sdk = "0.14.4"
 sphinx-autoapi = "^1.8.4"
-spiffworkflow = {path = "/home/jason/projects/github/sartography/SpiffWorkflow", develop = true}
+spiffworkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "feature/parse_spiffworkflow_extensions"}
 werkzeug = "*"
 
 [package.source]
-type = "directory"
-url = "../flask-bpmn"
+type = "git"
+url = "https://github.com/sartography/flask-bpmn"
+reference = "main"
+resolved_reference = "e421d8797975a1360e5057067072b16f8ebe934a"
 
 [[package]]
 name = "flask-cors"
@@ -1715,11 +1717,11 @@ test = ["pytest"]
 [[package]]
 name = "SpiffWorkflow"
 version = "1.1.7"
-description = "A workflow framework and BPMN/DMN Processor"
+description = ""
 category = "main"
 optional = false
 python-versions = "*"
-develop = true
+develop = false
 
 [package.dependencies]
 celery = "*"
@@ -1729,8 +1731,10 @@ lxml = "*"
 pytz = "*"
 
 [package.source]
-type = "directory"
-url = "../SpiffWorkflow"
+type = "git"
+url = "https://github.com/sartography/SpiffWorkflow"
+reference = "feature/parse_spiffworkflow_extensions"
+resolved_reference = "67054883d4040d6755bf0555f072ff85aa42093c"
 
 [[package]]
 name = "sqlalchemy"
@@ -2017,7 +2021,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-
 [metadata]
 lock-version = "1.1"
 python-versions = "^3.9"
-content-hash = "ca131da7f816bbda38aa2aea41679280d6d941bb79cd98d7d057db34acc1489a"
+content-hash = "e737217fd58d75c3a35e968d36ed2f1b2579bf8554ec24ec5452bbfb465fd509"
 
 [metadata.files]
 alabaster = [

@@ -27,13 +27,13 @@ flask-marshmallow = "*"
 flask-migrate = "*"
 flask-restful = "*"
 werkzeug = "*"
-# spiffworkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"}
-spiffworkflow = {develop = true, path = "/home/jason/projects/github/sartography/SpiffWorkflow"}
+spiffworkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "feature/parse_spiffworkflow_extensions"}
+# spiffworkflow = {develop = true, path = "/home/jason/projects/github/sartography/SpiffWorkflow"}
 sentry-sdk = "0.14.4"
 sphinx-autoapi = "^1.8.4"
-flask-bpmn = {develop = true, path = "/home/jason/projects/github/sartography/flask-bpmn"}
+# flask-bpmn = {develop = true, path = "/home/jason/projects/github/sartography/flask-bpmn"}
 # flask-bpmn = {develop = true, path = "/Users/kevin/projects/github/sartography/flask-bpmn"}
-# flask-bpmn = {git = "https://github.com/sartography/flask-bpmn", rev = "main"}
+flask-bpmn = {git = "https://github.com/sartography/flask-bpmn", rev = "main"}
 mysql-connector-python = "^8.0.29"
 pytest-flask = "^1.2.0"
 pytest-flask-sqlalchemy = "^1.1.0"

@@ -19,7 +19,7 @@ class ActiveTaskModel(SpiffworkflowBaseDBModel):
     __tablename__ = "active_task"
     __table_args__ = (
         db.UniqueConstraint(
-            "task_id", "process_instance_id", name="active_task_unique"
+            "spiffworkflow_task_id", "process_instance_id", name="active_task_unique"
         ),
     )
 
@@ -27,9 +27,9 @@ class ActiveTaskModel(SpiffworkflowBaseDBModel):
     bpmn_json: str = ""
     assigned_principal: RelationshipProperty[PrincipalModel] = relationship(PrincipalModel)
     id: int = db.Column(db.Integer, primary_key=True)
-    task_id: str = db.Column(db.String(50), nullable=False)
+    spiffworkflow_task_id: str = db.Column(db.String(50), nullable=False)
     process_instance_id: int = db.Column(
-        ForeignKey(ProcessInstanceModel.id), nullable=False  # type: ignore
+        ForeignKey(ProcessInstanceModel.id), nullable=False  # type: ignore
     )
     assigned_principal_id: int = db.Column(ForeignKey(PrincipalModel.id))
     process_instance_data: str = db.Column(db.Text)

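The model keeps the composite unique constraint over the renamed column, so a given SpiffWorkflow task id can appear at most once per process instance while still repeating across instances. A small self-contained sketch of that behaviour, using plain SQLAlchemy with an in-memory SQLite database rather than the project's Flask-SQLAlchemy setup (the class below is a simplified stand-in, not the real ActiveTaskModel):

from sqlalchemy import Column, Integer, String, UniqueConstraint, create_engine
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class ActiveTask(Base):  # simplified stand-in for ActiveTaskModel
    __tablename__ = "active_task"
    __table_args__ = (
        UniqueConstraint(
            "spiffworkflow_task_id", "process_instance_id", name="active_task_unique"
        ),
    )
    id = Column(Integer, primary_key=True)
    spiffworkflow_task_id = Column(String(50), nullable=False)
    process_instance_id = Column(Integer, nullable=False)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(ActiveTask(spiffworkflow_task_id="task-1", process_instance_id=1))
    session.add(ActiveTask(spiffworkflow_task_id="task-1", process_instance_id=2))
    session.commit()  # allowed: same task id, different process instances

    session.add(ActiveTask(spiffworkflow_task_id="task-1", process_instance_id=1))
    try:
        session.commit()
    except IntegrityError:
        print("duplicate (spiffworkflow_task_id, process_instance_id) rejected")

The third insert reuses the (task id, process instance) pair from the first row, so the commit fails with IntegrityError.
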
@@ -497,10 +497,97 @@ def task_submit_user_data(
     task_id: int, body: Dict[str, Any]
 ) -> flask.wrappers.Response:
     """Task_submit_user_data."""
     print(f"body: {body}")
     print(f"task_id: {task_id}")
+    principal = PrincipalModel.query.filter_by(user_id=g.user.id).first()
+    if principal is None:
+        raise (
+            ApiError(
+                code="principal_not_found",
+                message=f"Principal not found from user id: {g.user.id}",
+                status_code=400,
+            )
+        )
+    active_task_assigned_to_me = ActiveTaskModel.query.filter_by(
+        id=task_id, assigned_principal_id=principal.id
+    ).first()
+    if active_task_assigned_to_me is None:
+        raise (
+            ApiError(
+                code="task_not_found",
+                message=f"Task not found for principal user: {g.user.id} and id: {task_id}",
+                status_code=400,
+            )
+        )
+
+    process_instance = ProcessInstanceModel.query.filter_by(process_instance_id=active_task_assigned_to_me.process_instance.id)
+    if process_instance is None:
+        raise (
+            ApiError(
+                code="process_instance_cannot_be_found",
+                message=f"Process instance cannot be found for active task: {active_task_assigned_to_me.id}",
+                status_code=400,
+            )
+        )
     return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
+
+
+# def update_task(workflow_id, task_id, body, terminate_loop=None, update_all=False):
+#     workflow_model = session.query(WorkflowModel).filter_by(id=workflow_id).first()
+#     if workflow_model is None:
+#         raise ApiError("invalid_workflow_id", "The given workflow id is not valid.", status_code=404)
+#     if workflow_model.state in ('hidden', 'disabled', 'locked'):
+#         raise ApiError(code='locked_workflow',
+#                        message='You tried to update a task for a workflow that is hidden, locked, or disabled.')
+#
+#     processor = WorkflowProcessor(workflow_model)
+#     task_id = uuid.UUID(task_id)
+#     spiff_task = processor.bpmn_workflow.get_task(task_id)
+#     _verify_user_and_role(processor, spiff_task)
+#     user = UserService.current_user(allow_admin_impersonate=False)  # Always log as the real user.
+#
+#     if not spiff_task:
+#         raise ApiError("empty_task", "Processor failed to obtain task.", status_code=404)
+#     if spiff_task.state != TaskState.READY:
+#         raise ApiError("invalid_state", "You may not update a task unless it is in the READY state. "
+#                                         "Consider calling a token reset to make this task Ready.")
+#
+#     if terminate_loop and spiff_task.is_looping():
+#         spiff_task.terminate_loop()
+#
+#     # Extract the details specific to the form submitted
+#     form_data = WorkflowService().extract_form_data(body, spiff_task)
+#
+#     # Update the task
+#     __update_task(processor, spiff_task, form_data, user)
+#
+#     # If we need to update all tasks, then get the next ready task and if it a multi-instance with the same
+#     # task spec, complete that form as well.
+#     if update_all:
+#         last_index = spiff_task.task_info()["mi_index"]
+#         next_task = processor.next_task()
+#         while next_task and next_task.task_info()["mi_index"] > last_index:
+#             __update_task(processor, next_task, form_data, user)
+#             last_index = next_task.task_info()["mi_index"]
+#             next_task = processor.next_task()
+#
+#     WorkflowService.update_task_assignments(processor)
+#     workflow_api_model = WorkflowService.processor_to_workflow_api(processor)
+#     return WorkflowApiSchema().dump(workflow_api_model)
+#
+#
+# def __update_task(processor, task, data, user):
+#     """All the things that need to happen when we complete a form, abstracted
+#     here because we need to do it multiple times when completing all tasks in
+#     a multi-instance task"""
+#     task.update_data(data)
+#     WorkflowService.post_process_form(task)  # some properties may update the data store.
+#     processor.complete_task(task)
+#     # Log the action before doing the engine steps, as doing so could effect the state of the task
+#     # the workflow could wrap around in the ngine steps, and the task could jump from being completed to
+#     # another state. What we are logging here is the completion.
+#     WorkflowService.log_task_action(user.uid, processor, task, TaskAction.COMPLETE.value)
+#     processor.do_engine_steps()
+#     processor.save()
 
 
 def get_file_from_request() -> Any:
     """Get_file_from_request."""

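One caveat in the new task_submit_user_data body: the process-instance lookup assigns the result of ProcessInstanceModel.query.filter_by(...) without a trailing .first(), so process_instance holds a Query object and the following "is None" check can never fire (the process_instance_id= keyword would also need to match an actual column; the migration above points the foreign key at process_instance.id). A minimal self-contained illustration of the pitfall, using plain SQLAlchemy with an in-memory SQLite database and a hypothetical stand-in model:

from sqlalchemy import Column, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class ProcessInstance(Base):  # hypothetical stand-in for ProcessInstanceModel
    __tablename__ = "process_instance"
    id = Column(Integer, primary_key=True)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    lookup = session.query(ProcessInstance).filter_by(id=999)
    print(lookup is None)          # False: filter_by() returns a Query, never None
    print(lookup.first() is None)  # True: no matching row, so .first() returns None
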
@@ -348,7 +348,7 @@ class ProcessInstanceProcessor:
                 form_file_name = extensions["formKey"]
 
             active_task = ActiveTaskModel(
-                task_id=str(ready_or_waiting_task.id),
+                spiffworkflow_task_id=str(ready_or_waiting_task.id),
                 process_instance_id=self.process_instance_model.id,
                 # FIXME: look for the correct principal based on ready_or_waiting_task.lane
                 assigned_principal_id=PrincipalModel.query.first().id,

@@ -660,7 +660,7 @@ def test_process_instance_list_with_default_list(
     process_instance_dict = response.json["results"][0]
     assert type(process_instance_dict["id"]) is int
     assert process_instance_dict["process_model_identifier"] == process_model_dir_name
-    assert process_instance_dict["process_group_id"] == test_process_group_id
+    assert process_instance_dict["process_group_identifier"] == test_process_group_id
     assert type(process_instance_dict["start_in_seconds"]) is int
     assert process_instance_dict["start_in_seconds"] > 0
     assert process_instance_dict["end_in_seconds"] is None