this is now working and stores state in a sqlite3 db w/ burnettk

jasquat 2022-05-16 15:21:35 -04:00
parent 4330fdb7ee
commit 4b3a91c13c
13 changed files with 306 additions and 424 deletions

bin/test_with_curl Executable file

@@ -0,0 +1,26 @@
#!/usr/bin/env bash

function error_handler() {
  >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
  exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail

if [[ "${1:-}" == "c" ]]; then
  curl --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{}'
elif grep -qE '^[0-9]$' <<<"${1:-}" ; then
  curl --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d "{ \"task_identifier\": \"${1}\"}"
else
  rm -rf currentstate.json
  curl --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "1", "answer": {"Product Name": "G", "Quantity": "2"}}'
  curl --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "1", "answer": {"Sleeve Type": "Short"}}'
  curl --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "1", "answer": {"Continue shopping?": "N"}}'
  curl --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "1", "answer": {"Shipping Method": "Overnight"}}'
  curl --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "1", "answer": {"Shipping Address": "Somewhere"}}'
  curl --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "1", "answer": {"Place Order": "Y"}}'
  curl --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "1", "answer": {"Card Number": "MY_CARD"}}'
  curl --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "2", "answer": {"Was the customer charged?": "Y"}}'
  curl --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "1", "answer": {"Was the product available?": "Y"}}'
  curl --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "1", "answer": {"Was the order shipped?": "Y"}}'
fi
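
For reference, the same happy-path walkthrough as a small Python script. This is a sketch, not part of the commit: it assumes the Flask app is serving on localhost:5000 and that the third-party requests package is installed.

import requests

BASE_URL = "http://localhost:5000/run_process"  # assumed dev-server address

# Each POST answers the form fields of the current ready task,
# mirroring the curl sequence in bin/test_with_curl above.
steps = [
    {"task_identifier": "1", "answer": {"Product Name": "G", "Quantity": "2"}},
    {"task_identifier": "1", "answer": {"Sleeve Type": "Short"}},
    {"task_identifier": "1", "answer": {"Continue shopping?": "N"}},
    {"task_identifier": "1", "answer": {"Shipping Method": "Overnight"}},
    {"task_identifier": "1", "answer": {"Shipping Address": "Somewhere"}},
    {"task_identifier": "1", "answer": {"Place Order": "Y"}},
    {"task_identifier": "1", "answer": {"Card Number": "MY_CARD"}},
    {"task_identifier": "2", "answer": {"Was the customer charged?": "Y"}},
    {"task_identifier": "1", "answer": {"Was the product available?": "Y"}},
    {"task_identifier": "1", "answer": {"Was the order shipped?": "Y"}},
]

for payload in steps:
    response = requests.post(BASE_URL, json=payload)
    response.raise_for_status()  # equivalent of curl --fail
    print(response.json())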

migrations/README Normal file

@@ -0,0 +1 @@
Single-database configuration for Flask.

migrations/alembic.ini Normal file

@@ -0,0 +1,50 @@
# A generic, single database configuration.

[alembic]
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false


# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic,flask_migrate

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[logger_flask_migrate]
level = INFO
handlers =
qualname = flask_migrate

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

migrations/env.py Normal file

@@ -0,0 +1,91 @@
from __future__ import with_statement

import logging
from logging.config import fileConfig

from flask import current_app

from alembic import context

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
config.set_main_option(
    'sqlalchemy.url',
    str(current_app.extensions['migrate'].db.get_engine().url).replace(
        '%', '%%'))
target_metadata = current_app.extensions['migrate'].db.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url, target_metadata=target_metadata, literal_binds=True
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """

    # this callback is used to prevent an auto-migration from being generated
    # when there are no changes to the schema
    # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
    def process_revision_directives(context, revision, directives):
        if getattr(config.cmd_opts, 'autogenerate', False):
            script = directives[0]
            if script.upgrade_ops.is_empty():
                directives[:] = []
                logger.info('No changes in schema detected.')

    connectable = current_app.extensions['migrate'].db.get_engine()

    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            process_revision_directives=process_revision_directives,
            **current_app.extensions['migrate'].configure_args
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
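
This env.py is the stock Flask-Migrate template: it pulls the database URL and model metadata from the Flask app's migrate extension at runtime, so migrations are applied through the app. A sketch of doing that programmatically (create_app is a hypothetical application factory name; flask db upgrade on the command line is the usual route):

from flask_migrate import upgrade

from spiff_workflow_webapp import create_app  # hypothetical factory name

app = create_app()
with app.app_context():
    upgrade()  # applies pending revisions via run_migrations_online() above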

migrations/script.py.mako Normal file

@@ -0,0 +1,24 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade():
    ${upgrades if upgrades else "pass"}


def downgrade():
    ${downgrades if downgrades else "pass"}

migrations/versions/c73669985652_.py Normal file (filename inferred from the revision ID and Alembic's default naming)

@@ -0,0 +1,38 @@
"""empty message

Revision ID: c73669985652
Revises:
Create Date: 2022-05-16 15:19:43.112086

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = 'c73669985652'
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('process_models',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('bpmn_json', sa.JSON(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('user',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=50), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('user')
    op.drop_table('process_models')
    # ### end Alembic commands ###
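
Once the upgrade has run, the stored workflow state can be inspected straight from the SQLite file with the standard library. A sketch: the database filename app.db is an assumption — use whatever SQLALCHEMY_DATABASE_URI points at.

import sqlite3

connection = sqlite3.connect("app.db")  # hypothetical filename
row = connection.execute(
    "SELECT id, bpmn_json FROM process_models LIMIT 1"
).fetchone()
if row is None:
    print("no workflow state stored yet")
else:
    process_model_id, bpmn_json = row
    # bpmn_json holds the serializer output for the in-flight workflow
    print(process_model_id, str(bpmn_json)[:200])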

spiff_workflow_webapp/api/data_store.py Deleted file (path inferred from the imports below)

@@ -1,122 +0,0 @@
"""Methods to talk to the database."""
import json
from datetime import datetime
from typing import Any

from spiff_workflow_webapp.models.data_store import DataStoreModel
from spiff_workflow_webapp.models.data_store import DataStoreSchema
from spiff_workflow_webapp.services.data_store_service import DataStoreBase

from flask import Blueprint
from sqlalchemy.orm import Session  # type: ignore

from spiff_workflow_webapp.api.api_error import ApiError

# from crc import session


def construct_blueprint(database_session: Session) -> Blueprint:
    """Construct_blueprint."""
    data_store_blueprint = Blueprint("data_store", __name__)
    database_session = database_session

    def study_multi_get(study_id: str) -> Any:
        """Get all data_store values for a given study_id study."""
        if study_id is None:
            raise ApiError("unknown_study", "Please provide a valid Study ID.")

        dsb = DataStoreBase()
        retval = dsb.get_multi_common(study_id, None)
        results = DataStoreSchema(many=True).dump(retval)
        return results

    def user_multi_get(user_id: str) -> Any:
        """Get all data values in the data_store for a userid."""
        if user_id is None:
            raise ApiError("unknown_study", "Please provide a valid UserID.")

        dsb = DataStoreBase()
        retval = dsb.get_multi_common(None, user_id)
        results = DataStoreSchema(many=True).dump(retval)
        return results

    def file_multi_get(file_id: str) -> Any:
        """Get all data values in the data store for a file_id."""
        if file_id is None:
            raise ApiError(
                code="unknown_file", message="Please provide a valid file id."
            )

        dsb = DataStoreBase()
        retval = dsb.get_multi_common(None, None, file_id=file_id)
        results = DataStoreSchema(many=True).dump(retval)
        return results

    def datastore_del(id: str) -> Any:
        """Delete a data store item for a key."""
        database_session.query(DataStoreModel).filter_by(id=id).delete()
        database_session.commit()
        json_value = json.dumps("deleted", ensure_ascii=False, indent=2)
        return json_value

    def datastore_get(id: str) -> Any:
        """Retrieve a data store item by a key."""
        item = database_session.query(DataStoreModel).filter_by(id=id).first()
        results = DataStoreSchema(many=False).dump(item)
        return results

    def update_datastore(id: str, body: dict) -> Any:
        """Allow a modification to a datastore item."""
        if id is None:
            raise ApiError("unknown_id", "Please provide a valid ID.")

        item = database_session.query(DataStoreModel).filter_by(id=id).first()
        if item is None:
            raise ApiError("unknown_item", 'The item "' + id + '" is not recognized.')

        DataStoreSchema().load(body, instance=item, database_session=database_session)
        item.last_updated = datetime.utcnow()
        database_session.add(item)
        database_session.commit()
        return DataStoreSchema().dump(item)

    def add_datastore(body: dict) -> Any:
        """Add a new datastore item."""
        if body.get("id", None):
            raise ApiError(
                "id_specified", "You may not specify an id for a new datastore item"
            )

        if "key" not in body:
            raise ApiError(
                "no_key", "You need to specify a key to add a datastore item"
            )

        if "value" not in body:
            raise ApiError(
                "no_value", "You need to specify a value to add a datastore item"
            )

        if (
            ("user_id" not in body)
            and ("study_id" not in body)
            and ("file_id" not in body)
        ):
            raise ApiError(
                "conflicting_values",
                "A datastore item should have either a study_id, user_id or file_id ",
            )

        present = 0
        for field in ["user_id", "study_id", "file_id"]:
            if field in body:
                present = present + 1
        if present > 1:
            message = "A datastore item should have one of a study_id, user_id or a file_id but not more than one of these"
            raise ApiError("conflicting_values", message)

        item = DataStoreSchema().load(body)
        # item.last_updated = datetime.utcnow()  # Do this in the database
        database_session.add(item)
        database_session.commit()
        return DataStoreSchema().dump(item)

    return data_store_blueprint

Binary file not shown.

spiff_workflow_webapp/models/data_store.py Deleted file (path inferred from the imports above)

@@ -1,32 +0,0 @@
"""Data_store."""
from crc import db
from flask_marshmallow.sqla import SQLAlchemyAutoSchema
from sqlalchemy import func


class DataStoreModel(db.Model):
    """DataStoreModel."""

    __tablename__ = "data_store"
    id = db.Column(db.Integer, primary_key=True)
    last_updated = db.Column(db.DateTime(timezone=True), server_default=func.now())
    key = db.Column(db.String, nullable=False)
    workflow_id = db.Column(db.Integer)
    study_id = db.Column(db.Integer, nullable=True)
    task_spec = db.Column(db.String)
    spec_id = db.Column(db.String)
    user_id = db.Column(db.String, nullable=True)
    file_id = db.Column(db.Integer, db.ForeignKey("file.id"), nullable=True)
    value = db.Column(db.String)


class DataStoreSchema(SQLAlchemyAutoSchema):
    """DataStoreSchema."""

    class Meta:
        """Meta."""

        model = DataStoreModel
        load_instance = True
        include_fk = True
        sqla_session = db.session

spiff_workflow_webapp/models/process_model.py New file (path inferred from the imports elsewhere in this commit)

@@ -0,0 +1,8 @@
from ..extensions import db
from sqlalchemy.orm import deferred


class ProcessModel(db.Model):
    __tablename__ = 'process_models'
    id = db.Column(db.Integer, primary_key=True)
    bpmn_json = deferred(db.Column(db.JSON))
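
This model is the commit's whole persistence story: the connector serializes the live workflow into bpmn_json, and the API route deserializes it on the next request. A condensed sketch of that round trip, using only names that appear elsewhere in this commit (it assumes a Flask app context and an existing workflow object):

from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer
from SpiffWorkflow.camunda.serializer.task_spec_converters import UserTaskConverter
from SpiffWorkflow.dmn.serializer.task_spec_converters import BusinessRuleTaskConverter

from spiff_workflow_webapp.extensions import db
from spiff_workflow_webapp.models.process_model import ProcessModel

wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter(
    [UserTaskConverter, BusinessRuleTaskConverter])
serializer = BpmnWorkflowSerializer(wf_spec_converter)

# Save: overwrite the single stored row with the current state (as run() does).
process_model = ProcessModel.query.filter().first()
if process_model is None:
    process_model = ProcessModel()
process_model.bpmn_json = serializer.serialize_json(workflow)
db.session.add(process_model)
db.session.commit()

# Load: rehydrate the workflow on the next request (as run_process() does).
process_model = ProcessModel.query.filter().first()
if process_model is not None:
    workflow = serializer.deserialize_json(process_model.bpmn_json)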

spiff_workflow_webapp API blueprint module — Modified file (exact path not shown)

@@ -2,26 +2,37 @@
 import os
 from flask import Blueprint
+from flask import request
 
 from ..models.user import User
 from spiff_workflow_webapp.spiff_workflow_connector import parse
 from spiff_workflow_webapp.spiff_workflow_connector import run
+from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer
+from SpiffWorkflow.camunda.serializer.task_spec_converters import UserTaskConverter
+from SpiffWorkflow.dmn.serializer.task_spec_converters import BusinessRuleTaskConverter
+from spiff_workflow_webapp.models.process_model import ProcessModel
+
+wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter([UserTaskConverter, BusinessRuleTaskConverter])
+serializer = BpmnWorkflowSerializer(wf_spec_converter)
 
 api = Blueprint("api", __name__)
 
 
 @api.route("/user/<name>")
 def create_user(name):
     """Create_user."""
-    user = User.query.filter_by(name="Anthony").first()
+    user = User.query.filter_by(name=name).first()
     return {"user": user.name}
 
 
-@api.route("/run_process", defaults={"answer": None, "task_identifier": None})
-@api.route("/run_process/<task_identifier>", defaults={"answer": None})
-@api.route("/run_process/<task_identifier>/<answer>")
-def run_process(task_identifier, answer):
+# @api.route("/run_process", defaults={"answer": None, "task_identifier": None})
+# @api.route("/run_process/<task_identifier>", defaults={"answer": None})
+# @api.route("/run_process/<task_identifier>/<answer>")
+# def run_process(task_identifier, answer):
+@api.route("/run_process", methods=['POST'])
+def run_process():
     """Run_process."""
     # parser = argparse.ArgumentParser("Simple BPMN runner")
     # parser.add_argument(
@@ -47,7 +58,9 @@ def run_process(task_identifier, answer):
     # )
     # args = parser.parse_args()
 
-    # content = request.json
+    content = request.json
+    # if 'task_identifier' in content:
     homedir = os.environ.get("HOME")
     process = "order_product"
     dmn = [
@@ -64,8 +77,13 @@ def run_process(task_identifier, answer):
     # with open(args.restore) as state:
     #     wf = serializer.deserialize_json(state.read())
     # else:
-    workflow = parse(process, bpmn, dmn)
-    response = run(workflow, task_identifier, answer)
+    workflow = None
+    process_model = ProcessModel.query.filter().first()
+    if process_model is None:
+        workflow = parse(process, bpmn, dmn)
+    else:
+        workflow = serializer.deserialize_json(process_model.bpmn_json)
+    response = run(workflow, content.get("task_identifier"), content.get("answer"))
     # except Exception:
     #     sys.stderr.write(traceback.format_exc())
     #     sys.exit(1)

spiff_workflow_webapp/services/data_store_service.py Deleted file (path inferred from the imports above)

@@ -1,219 +0,0 @@
"""Data_store_service."""
from crc import session
from crc.models.data_store import DataStoreModel
from crc.models.workflow import WorkflowModel

from flask import g
import sqlalchemy
from sqlalchemy import desc

from spiff_workflow_webapp.api.api_error import ApiError

from typing import Any, Tuple  # Tuple is used by check_args_2 below


class DataStoreBase:
    """DataStoreBase."""

    def set_validate_common(
        self, task_id, study_id, workflow_id, script_name, user_id, file_id, *args
    ):
        """Set_validate_common."""
        self.check_args_2(args, script_name)
        key = args[0]
        value = args[1]
        if script_name == "study_data_set":
            record = {
                "task_id": task_id,
                "study_id": study_id,
                "workflow_id": workflow_id,
                key: value,
            }
        elif script_name == "file_data_set":
            record = {
                "task_id": task_id,
                "study_id": study_id,
                "workflow_id": workflow_id,
                "file_id": file_id,
                key: value,
            }
        elif script_name == "user_data_set":
            record = {
                "task_id": task_id,
                "study_id": study_id,
                "workflow_id": workflow_id,
                "user_id": user_id,
                key: value,
            }
        g.validation_data_store.append(record)
        return record

    def get_validate_common(
        self, script_name, study_id=None, user_id=None, file_id=None, *args
    ):
        """This method uses a temporary validation_data_store that is only available for the current validation request.

        This allows us to set data_store values during validation that don't affect the real data_store.
        For data_store `gets`, we first look in the temporary validation_data_store.
        If we don't find an entry in validation_data_store, we look in the real data_store.
        """
        key = args[0]
        if script_name == "study_data_get":
            # If it's in the validation data store, return it
            for record in g.validation_data_store:
                if (
                    "study_id" in record
                    and record["study_id"] == study_id
                    and key in record
                ):
                    return record[key]
            # If not in validation_data_store, look in the actual data_store
            return self.get_data_common(
                study_id, user_id, "study_data_get", file_id, *args
            )
        elif script_name == "file_data_get":
            for record in g.validation_data_store:
                if (
                    "file_id" in record
                    and record["file_id"] == file_id
                    and key in record
                ):
                    return record[key]
            return self.get_data_common(
                study_id, user_id, "file_data_get", file_id, *args
            )
        elif script_name == "user_data_get":
            for record in g.validation_data_store:
                if (
                    "user_id" in record
                    and record["user_id"] == user_id
                    and key in record
                ):
                    return record[key]
            return self.get_data_common(
                study_id, user_id, "user_data_get", file_id, *args
            )

    @staticmethod
    def check_args(args: Any, maxlen: int = 1, script_name: str = "study_data_get") -> None:
        """Check_args."""
        if len(args) < 1 or len(args) > maxlen:
            raise ApiError(
                code="missing_argument",
                message=f"The {script_name} script takes either one or two arguments, "
                f"starting with the key and an optional default",
            )

    @staticmethod
    def check_args_2(args: Tuple[str, str], script_name: str = "study_data_set"):
        """Check_args_2."""
        if len(args) != 2:
            raise ApiError(
                code="missing_argument",
                message=f"The {script_name} script takes two arguments, key and value, in that order.",
            )

    def set_data_common(
        self, task_spec, study_id, user_id, workflow_id, script_name, file_id, *args
    ):
        """Set_data_common."""
        self.check_args_2(args, script_name=script_name)
        key = args[0]
        value = args[1]
        if value == "" or value is None:
            # We delete the data store if the value is empty
            return self.delete_data_store(study_id, user_id, file_id, *args)

        workflow_spec_id = None
        if workflow_id is not None:
            workflow = (
                session.query(WorkflowModel)
                .filter(WorkflowModel.id == workflow_id)
                .first()
            )
            workflow_spec_id = workflow.workflow_spec_id

        # Check if this data store is previously set
        query = session.query(DataStoreModel).filter(DataStoreModel.key == key)
        if study_id:
            query = query.filter(DataStoreModel.study_id == study_id)
        elif file_id:
            query = query.filter(DataStoreModel.file_id == file_id)
        elif user_id:
            query = query.filter(DataStoreModel.user_id == user_id)
        result = query.order_by(desc(DataStoreModel.last_updated)).all()

        if result:
            dsm = result[0]
            dsm.value = value
            if task_spec:
                dsm.task_spec = task_spec
            if workflow_id:
                dsm.workflow_id = workflow_id
            if workflow_spec_id:
                dsm.spec_id = workflow_spec_id
            if len(result) > 1:
                # We had a bug where we had created new records instead of updating values of existing records
                # This just gets rid of all the old unused records
                self.delete_extra_data_stores(result[1:])
        else:
            dsm = DataStoreModel(
                key=key,
                value=value,
                study_id=study_id,
                task_spec=task_spec,
                user_id=user_id,  # Make this available to any User
                file_id=file_id,
                workflow_id=workflow_id,
                spec_id=workflow_spec_id,
            )
        session.add(dsm)
        session.commit()
        return dsm.value

    def get_data_common(self, study_id: str, user_id: str, script_name: str, file_id: (str | None) = None, *args: Any) -> Any:
        """Get_data_common."""
        self.check_args(args, 2, script_name)
        record = (
            session.query(DataStoreModel)
            .filter_by(study_id=study_id, user_id=user_id, file_id=file_id, key=args[0])
            .first()
        )
        if record:
            return record.value
        else:
            # This is a possible default value passed in from the data_store get methods
            if len(args) == 2:
                return args[1]

    @staticmethod
    def get_multi_common(study_id: str, user_id: str, file_id: (str | None) = None) -> sqlalchemy.orm.Query:
        """Get_multi_common."""
        results = session.query(DataStoreModel).filter_by(
            study_id=study_id, user_id=user_id, file_id=file_id
        )
        return results

    @staticmethod
    def delete_data_store(study_id: str, user_id: str, file_id: str, *args: Any) -> None:
        """Delete_data_store."""
        query = session.query(DataStoreModel).filter(DataStoreModel.key == args[0])
        if user_id:
            query = query.filter(DataStoreModel.user_id == user_id)
        elif file_id:
            query = query.filter(DataStoreModel.file_id == file_id)
        elif study_id:
            query = query.filter(DataStoreModel.study_id == study_id)
        record = query.first()
        if record is not None:
            session.delete(record)
            session.commit()

    @staticmethod
    def delete_extra_data_stores(records: list[DataStoreModel]) -> None:
        """We had a bug where we created new records instead of updating existing records.

        We use this to clean up all the extra records.
        We may remove this method in the future.
        """
        for record in records:
            session.query(DataStoreModel).filter(
                DataStoreModel.id == record.id
            ).delete()
        session.commit()

spiff_workflow_webapp/spiff_workflow_connector.py Modified file (path inferred from the imports above)

@@ -19,6 +19,9 @@ from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser
 from SpiffWorkflow.dmn.serializer.task_spec_converters import BusinessRuleTaskConverter
 from SpiffWorkflow.task import Task
 
+from spiff_workflow_webapp.extensions import db
+from spiff_workflow_webapp.models.process_model import ProcessModel
+
 # from custom_script_engine import CustomScriptEngine
 
 wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter(
@@ -43,15 +46,6 @@ def parse(process, bpmn_files, dmn_files):
     return BpmnWorkflow(parser.get_spec(process))
 
 
-def select_option(prompt, options):
-    """Select_option."""
-    option = input(prompt)
-    while option not in options:
-        print("Invalid selection")
-        option = input(prompt)
-    return option
-
-
 def display_task(task):
     """Display_task."""
     print(f"\n{task.task_spec.description}")
@@ -84,29 +78,30 @@ def complete_user_task(task, answer=None):
             if answer is None:
                 required_user_input_fields[field.label] = options
             else:
-                response = option_map[answer]
+                response = option_map[answer[field.label]]
+        elif field.type == "string":
+            if answer is None:
+                required_user_input_fields[field.label] = "STRING"
+            else:
+                response = answer[field.label]
         else:
             if answer is None:
                 required_user_input_fields[field.label] = "(1..)"
             else:
                 if field.type == "long":
-                    response = int(answer)
-        task.update_data_var(field.id, response)
+                    response = int(answer[field.label])
+        if answer:
+            task.update_data_var(field.id, response)
     return required_user_input_fields
 
 
-def complete_manual_task(task):
-    """Complete_manual_task."""
-    display_task(task)
-    input("Press any key to mark task complete")
-
-
 def print_state(workflow):
     """Print_state."""
     task = workflow.last_task
-    print("\nLast Task")
-    print(format_task(task))
-    print(json.dumps(task.data, indent=2, separators=[", ", ": "]))
+    # print("\nLast Task")
+    # print(format_task(task))
+    # print(json.dumps(task.data, indent=2, separators=[", ", ": "]))
+    return_json = {"last_task": format_task(task)}
 
     display_types = (UserTask, ManualTask, ScriptTask, ThrowingEvent, CatchingEvent)
     all_tasks = [
@@ -118,13 +113,14 @@ def print_state(workflow):
         task for task in all_tasks if task.state in [Task.READY, Task.WAITING]
     ]
 
-    print("\nUpcoming Tasks")
+    return_json['upcoming_tasks'] = []
     for _idx, task in enumerate(upcoming_tasks):
-        print(format_task(task))
+        return_json['upcoming_tasks'].append(format_task(task))
 
-    if input("\nShow all tasks? ").lower() == "y":
-        for _idx, task in enumerate(all_tasks):
-            print(format_task(task))
+    # if input("\nShow all tasks? ").lower() == "y":
+    #     for _idx, task in enumerate(all_tasks):
+    #         print(format_task(task))
+    return return_json
 
 
 def run(workflow, task_identifier=None, answer=None):
@@ -146,19 +142,6 @@ def run(workflow, task_identifier=None, answer=None):
         if task_identifier is None:
             return formatted_options
 
-        # selected = None
-        # while selected not in options and selected not in ["", "D", "d", "exit"]:
-        #     selected = input(
-        #         "Select task to complete, enter to wait, or D to dump the workflow state: "
-        #     )
-        # if selected.lower() == "d":
-        #     filename = input("Enter filename: ")
-        #     state = serializer.serialize_json(workflow)
-        #     with open(filename, "w") as dump:
-        #         dump.write(state)
-        # elif selected == "exit":
-        #     exit()
         next_task = options[task_identifier]
         if isinstance(next_task.task_spec, UserTask):
             if answer is None:
@@ -167,18 +150,34 @@ def run(workflow, task_identifier=None, answer=None):
             complete_user_task(next_task, answer)
             next_task.complete()
         elif isinstance(next_task.task_spec, ManualTask):
-            complete_manual_task(next_task)
             next_task.complete()
         else:
            next_task.complete()
 
         workflow.refresh_waiting_tasks()
         workflow.do_engine_steps()
+        tasks_status = {}
         if step:
-            print_state(workflow)
+            tasks_status = print_state(workflow)
 
-    print("\nWorkflow Data")
-    print(json.dumps(workflow.data, indent=2, separators=[", ", ": "]))
+    ready_tasks = workflow.get_ready_user_tasks()
+    formatted_options = {}
+    for idx, task in enumerate(ready_tasks):
+        option = format_task(task, False)
+        formatted_options[str(idx + 1)] = option
+
+    state = serializer.serialize_json(workflow)
+    process_model = ProcessModel.query.filter().first()
+    if process_model is None:
+        process_model = ProcessModel()
+    process_model.bpmn_json = state
+    db.session.add(process_model)
+    db.session.commit()
+    # with open("currentstate.json", "w") as dump:
+    #     dump.write(state)
+    tasks_status["next_activity"] = formatted_options
+    return tasks_status
 
 
 if __name__ == "__main__":
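
Putting the pieces together: after a step executes, the JSON a client gets back through /run_process is the dict built by print_state() plus the "next_activity" options appended at the end of run(). An illustrative shape only — the actual strings come from format_task() and the loaded BPMN model:

# Illustrative response for a POST to /run_process (values are placeholders).
example_response = {
    "last_task": "<format_task(workflow.last_task)>",
    "upcoming_tasks": [
        "<format_task(task)> for each READY/WAITING display task",
    ],
    "next_activity": {
        "1": "<format_task(task, False)> for each ready user task",
    },
}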