pre-commit is passing w/ burnettk

jasquat 2022-05-13 15:00:47 -04:00
parent 5ea9dd3a44
commit 9b8334f116
12 changed files with 162 additions and 84 deletions

View File

@@ -1,3 +1,4 @@
"""__init__."""
from flask import Flask
from .routes.api import api

View File

@@ -1,5 +1,6 @@
from flask_sqlalchemy import SQLAlchemy
"""Extensions."""
from flask_migrate import Migrate
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
migrate = Migrate()
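
The commit doesn't show how the `api` blueprint and the `db`/`migrate` extensions get attached to the app. A minimal app-factory sketch of the likely wiring, assuming a hypothetical `create_app` entry point and config that are not part of this commit:

from flask import Flask

from .extensions import db, migrate
from .routes.api import api


def create_app():
    """Application factory: binds extensions and blueprints to one app."""
    app = Flask(__name__)
    # Hypothetical config; the real settings are not shown in this commit.
    app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///app.db"

    db.init_app(app)           # attach the module-level SQLAlchemy instance
    migrate.init_app(app, db)  # attach Flask-Migrate to the same app/db pair
    app.register_blueprint(api)
    return app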

View File

@@ -0,0 +1 @@
"""__init__."""

View File

@@ -1,9 +1,12 @@
"""Data_store."""
from crc import db
from flask_marshmallow.sqla import SQLAlchemyAutoSchema
from sqlalchemy import func
class DataStoreModel(db.Model):
"""DataStoreModel."""
__tablename__ = "data_store"
id = db.Column(db.Integer, primary_key=True)
last_updated = db.Column(db.DateTime(timezone=True), server_default=func.now())
@@ -18,7 +21,11 @@ class DataStoreModel(db.Model):
class DataStoreSchema(SQLAlchemyAutoSchema):
"""DataStoreSchema."""
class Meta:
"""Meta."""
model = DataStoreModel
load_instance = True
include_fk = True
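
A usage note, not from the commit: because the schema sets `load_instance = True`, `load()` returns a `DataStoreModel` rather than a dict, and marshmallow-sqlalchemy then needs a session. A sketch using the `key`/`value` columns that the data-store service below references, eliding any other columns the model may require:

from crc import session

# Hypothetical payload; other model columns are elided here.
payload = {"key": "some_key", "value": "some_value"}
record = DataStoreSchema().load(payload, session=session)  # -> DataStoreModel, not a dict
assert record.key == "some_key"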

View File

@@ -1,5 +1,9 @@
"""User."""
from ..extensions import db
class User(db.Model):
"""User."""
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(50))

View File

@@ -1,6 +1,10 @@
"""Video."""
from ..extensions import db
class Video(db.Model):
"""Video."""
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(50))
url = db.Column(db.String(50))

View File

@@ -1,17 +1,17 @@
"""Workflow."""
import enum
import marshmallow
from crc import db
from crc import ma
from marshmallow import EXCLUDE
from marshmallow import fields
from marshmallow import INCLUDE
from marshmallow import post_load
from sqlalchemy import func
from sqlalchemy.orm import deferred
class WorkflowSpecCategory:
"""WorkflowSpecCategory."""
def __init__(self, id, display_name, display_order=0, admin=False):
"""__init__."""
self.id = (
@@ -34,7 +34,11 @@ class WorkflowSpecCategory:
class WorkflowSpecCategorySchema(ma.Schema):
"""WorkflowSpecCategorySchema."""
class Meta:
"""Meta."""
model = WorkflowSpecCategory
fields = ["id", "display_name", "display_order", "admin"]
@@ -45,6 +49,8 @@ class WorkflowSpecCategorySchema(ma.Schema):
class WorkflowSpecInfo:
"""WorkflowSpecInfo."""
def __init__(
self,
id,
@@ -55,11 +61,12 @@ class WorkflowSpecInfo:
library=False,
primary_file_name="",
primary_process_id="",
libraries=[],
libraries=None,
category_id="",
display_order=0,
is_review=False,
):
"""__init__."""
self.id = id  # String unique id
self.display_name = display_name
self.description = description
@@ -70,10 +77,14 @@ class WorkflowSpecInfo:
self.primary_file_name = primary_file_name
self.primary_process_id = primary_process_id
self.is_review = is_review
self.libraries = libraries
self.category_id = category_id
if libraries is None:
libraries = []
self.libraries = libraries
def __eq__(self, other):
"""__eq__."""
if not isinstance(other, WorkflowSpecInfo):
return False
if other.id == self.id:
@@ -82,7 +93,11 @@ class WorkflowSpecInfo:
class WorkflowSpecInfoSchema(ma.Schema):
"""WorkflowSpecInfoSchema."""
class Meta:
"""Meta."""
model = WorkflowSpecInfo
id = marshmallow.fields.String(required=True)
@@ -100,10 +115,13 @@ class WorkflowSpecInfoSchema(ma.Schema):
@post_load
def make_spec(self, data, **kwargs):
"""Make_spec."""
return WorkflowSpecInfo(**data)
class WorkflowState(enum.Enum):
"""WorkflowState."""
hidden = "hidden"
disabled = "disabled"
required = "required"
@@ -112,14 +130,18 @@ class WorkflowState(enum.Enum):
@classmethod
def has_value(cls, value):
"""Has_value."""
return value in cls._value2member_map_
@staticmethod
def list():
"""List."""
return list(map(lambda c: c.value, WorkflowState))
class WorkflowStatus(enum.Enum):
"""WorkflowStatus."""
not_started = "not_started"
user_input_required = "user_input_required"
waiting = "waiting"
@@ -128,6 +150,8 @@ class WorkflowStatus(enum.Enum):
class WorkflowModel(db.Model):
"""WorkflowModel."""
__tablename__ = "workflow"
id = db.Column(db.Integer, primary_key=True)
bpmn_workflow_json = deferred(db.Column(db.JSON))
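
Aside from the quote and docstring fixes, the substantive change in this file is `libraries=[]` becoming `libraries=None` with a `None` check in the body. That avoids Python's mutable-default pitfall: a default list is created once, at function definition time, and shared by every call that omits the argument. A minimal demonstration, independent of this codebase:

def add_buggy(name, items=[]):
    # The same list object is reused across calls.
    items.append(name)
    return items


def add_fixed(name, items=None):
    # A fresh list is created on each call instead.
    if items is None:
        items = []
    items.append(name)
    return items


print(add_buggy("a"))  # ['a']
print(add_buggy("b"))  # ['a', 'b']  <- state leaked from the first call
print(add_fixed("a"))  # ['a']
print(add_fixed("b"))  # ['b']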

View File

@@ -0,0 +1 @@
"""__init__."""

View File

@@ -1,13 +1,14 @@
"""Api."""
from flask import Blueprint
from ..extensions import db
from ..models.user import User
api = Blueprint('api', __name__)
api = Blueprint("api", __name__)
@api.route('/user/<name>')
@api.route("/user/<name>")
def create_user(name):
"""Create_user."""
user = User.query.filter_by(name='Anthony').first()
user = User.query.filter_by(name="Anthony").first()
return {'user': user.name}
return {"user": user.name}

View File

@@ -1,16 +1,17 @@
"""Main."""
from flask import Blueprint
from ..extensions import db
from ..models.user import User
from ..models.video import Video
main = Blueprint('main', __name__)
main = Blueprint("main", __name__)
@main.route('/user/<name>')
@main.route("/user/<name>")
def create_user(name):
"""Create_user."""
user = User(name=name)
db.session.add(user)
db.session.commit()
return 'Created User!'
return "Created User!"

View File

@@ -1,3 +1,4 @@
"""Data_store_service."""
from crc import session
from crc.models.data_store import DataStoreModel
from crc.models.workflow import WorkflowModel
@@ -8,6 +9,8 @@ from spiff_workflow_webapp.api.api_error import ApiError
class DataStoreBase:
"""DataStoreBase."""
def set_validate_common(
self, task_id, study_id, workflow_id, script_name, user_id, file_id, *args
):
@@ -44,10 +47,12 @@ class DataStoreBase:
def get_validate_common(
self, script_name, study_id=None, user_id=None, file_id=None, *args
):
# This method uses a temporary validation_data_store that is only available for the current validation request.
# This allows us to set data_store values during validation that don't affect the real data_store.
# For data_store `gets`, we first look in the temporary validation_data_store.
# If we don't find an entry in validation_data_store, we look in the real data_store.
"""This method uses a temporary validation_data_store that is only available for the current validation request.
This allows us to set data_store values during validation that don't affect the real data_store.
For data_store `gets`, we first look in the temporary validation_data_store.
If we don't find an entry in validation_data_store, we look in the real data_store.
"""
key = args[0]
if script_name == "study_data_get":
# If it's in the validation data store, return it
@@ -107,7 +112,7 @@ class DataStoreBase:
def set_data_common(
self, task_spec, study_id, user_id, workflow_id, script_name, file_id, *args
):
"""Set_data_common."""
self.check_args_2(args, script_name=script_name)
key = args[0]
value = args[1]
@@ -162,6 +167,7 @@ class DataStoreBase:
return dsm.value
def get_data_common(self, study_id, user_id, script_name, file_id=None, *args):
"""Get_data_common."""
self.check_args(args, 2, script_name)
record = (
session.query(DataStoreModel)
@@ -177,6 +183,7 @@ class DataStoreBase:
@staticmethod
def get_multi_common(study_id, user_id, file_id=None):
"""Get_multi_common."""
results = session.query(DataStoreModel).filter_by(
study_id=study_id, user_id=user_id, file_id=file_id
)
@@ -184,7 +191,7 @@ class DataStoreBase:
@staticmethod
def delete_data_store(study_id, user_id, file_id, *args):
"""Delete_data_store."""
query = session.query(DataStoreModel).filter(DataStoreModel.key == args[0])
if user_id:
query = query.filter(DataStoreModel.user_id == user_id)
@@ -200,8 +207,10 @@ class DataStoreBase:
@staticmethod
def delete_extra_data_stores(records):
"""We had a bug where we created new records instead of updating existing records.
We use this to clean up all the extra records.
We may remove this method in the future."""
We may remove this method in the future.
"""
for record in records:
session.query(DataStoreModel).filter(
DataStoreModel.id == record.id
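
The `get_validate_common` docstring above describes a two-level lookup: the request-scoped validation store is checked first, then the persistent store. Stripped of the project specifics, the pattern is a plain dictionary fallback (names here are illustrative, not the project's API):

def get_with_fallback(key, validation_store, persistent_store):
    """Prefer values written during the current validation run."""
    if key in validation_store:
        return validation_store[key]
    return persistent_store.get(key)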

View File

@@ -1,40 +1,40 @@
"""Spiff Workflow Connector."""
import argparse
import json
import sys
import traceback
import json
from jinja2 import Template
from SpiffWorkflow.task import Task
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.specs.ManualTask import ManualTask
from SpiffWorkflow.bpmn.specs.ScriptTask import ScriptTask
from SpiffWorkflow.bpmn.specs.events.event_types import CatchingEvent, ThrowingEvent
from SpiffWorkflow.camunda.parser.CamundaParser import CamundaParser
from SpiffWorkflow.camunda.specs.UserTask import EnumFormField, UserTask
from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer
from SpiffWorkflow.camunda.serializer.task_spec_converters import UserTaskConverter
from SpiffWorkflow.dmn.serializer.task_spec_converters import BusinessRuleTaskConverter
from custom_script_engine import CustomScriptEngine
from jinja2 import Template
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer
from SpiffWorkflow.bpmn.specs.events.event_types import CatchingEvent
from SpiffWorkflow.bpmn.specs.events.event_types import ThrowingEvent
from SpiffWorkflow.bpmn.specs.ManualTask import ManualTask
from SpiffWorkflow.bpmn.specs.ScriptTask import ScriptTask
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.camunda.parser.CamundaParser import CamundaParser
from SpiffWorkflow.camunda.serializer.task_spec_converters import UserTaskConverter
from SpiffWorkflow.camunda.specs.UserTask import EnumFormField
from SpiffWorkflow.camunda.specs.UserTask import UserTask
from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser
from SpiffWorkflow.dmn.serializer.task_spec_converters import BusinessRuleTaskConverter
from SpiffWorkflow.task import Task
wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter(
[UserTaskConverter, BusinessRuleTaskConverter])
[UserTaskConverter, BusinessRuleTaskConverter]
)
serializer = BpmnWorkflowSerializer(wf_spec_converter)
class Parser(BpmnDmnParser):
"""Parser."""
OVERRIDE_PARSER_CLASSES = BpmnDmnParser.OVERRIDE_PARSER_CLASSES
OVERRIDE_PARSER_CLASSES.update(CamundaParser.OVERRIDE_PARSER_CLASSES)
def parse(process, bpmn_files, dmn_files):
"""Parse."""
parser = Parser()
parser.add_bpmn_files(bpmn_files)
if dmn_files:
@@ -43,7 +43,7 @@ def parse(process, bpmn_files, dmn_files):
def select_option(prompt, options):
"""Select_option."""
option = input(prompt)
while option not in options:
print("Invalid selection")
@@ -52,33 +52,33 @@ def select_option(prompt, options):
def display_task(task):
print(f'\n{task.task_spec.description}')
"""Display_task."""
print(f"\n{task.task_spec.description}")
if task.task_spec.documentation is not None:
template = Template(task.task_spec.documentation)
print(template.render(task.data))
def format_task(task, include_state=True):
if hasattr(task.task_spec, 'lane') and task.task_spec.lane is not None:
lane = f'[{task.task_spec.lane}]'
"""Format_task."""
if hasattr(task.task_spec, "lane") and task.task_spec.lane is not None:
lane = f"[{task.task_spec.lane}]"
else:
lane = ''
state = f'[{task.get_state_name()}]' if include_state else ''
return f'{lane} {task.task_spec.description} ({task.task_spec.name}) {state}'
lane = ""
state = f"[{task.get_state_name()}]" if include_state else ""
return f"{lane} {task.task_spec.description} ({task.task_spec.name}) {state}"
def complete_user_task(task):
"""Complete_user_task."""
display_task(task)
if task.data is None:
task.data = {}
for field in task.task_spec.form.fields:
if isinstance(field, EnumFormField):
option_map = dict([(opt.name, opt.id) for opt in field.options])
options = "(" + ', '.join(option_map) + ")"
option_map = {opt.name: opt.id for opt in field.options}
options = "(" + ", ".join(option_map) + ")"
prompt = f"{field.label} {options} "
option = select_option(prompt, option_map.keys())
response = option_map[option]
@@ -90,33 +90,39 @@ def complete_user_task(task):
def complete_manual_task(task):
"""Complete_manual_task."""
display_task(task)
input("Press any key to mark task complete")
def print_state(workflow):
"""Print_state."""
task = workflow.last_task
print('\nLast Task')
print("\nLast Task")
print(format_task(task))
print(json.dumps(task.data, indent=2, separators=[', ', ': ']))
print(json.dumps(task.data, indent=2, separators=[", ", ": "]))
display_types = (UserTask, ManualTask, ScriptTask, ThrowingEvent, CatchingEvent)
all_tasks = [task for task in workflow.get_tasks() if isinstance(task.task_spec, display_types)]
upcoming_tasks = [task for task in all_tasks if task.state in [Task.READY, Task.WAITING]]
all_tasks = [
task
for task in workflow.get_tasks()
if isinstance(task.task_spec, display_types)
]
upcoming_tasks = [
task for task in all_tasks if task.state in [Task.READY, Task.WAITING]
]
print('\nUpcoming Tasks')
for idx, task in enumerate(upcoming_tasks):
print("\nUpcoming Tasks")
for _idx, task in enumerate(upcoming_tasks):
print(format_task(task))
if input('\nShow all tasks? ').lower() == 'y':
for idx, task in enumerate(all_tasks):
if input("\nShow all tasks? ").lower() == "y":
for _idx, task in enumerate(all_tasks):
print(format_task(task))
def run(workflow, step):
"""Run."""
workflow.do_engine_steps()
while not workflow.is_completed():
@@ -127,20 +133,22 @@ def run(workflow, step):
for idx, task in enumerate(ready_tasks):
option = format_task(task, False)
options[str(idx + 1)] = task
print(f'{idx + 1}. {option}')
print(f"{idx + 1}. {option}")
selected = None
while selected not in options and selected not in ['', 'D', 'd', "exit"]:
selected = input('Select task to complete, enter to wait, or D to dump the workflow state: ')
while selected not in options and selected not in ["", "D", "d", "exit"]:
selected = input(
"Select task to complete, enter to wait, or D to dump the workflow state: "
)
if selected.lower() == 'd':
filename = input('Enter filename: ')
if selected.lower() == "d":
filename = input("Enter filename: ")
state = serializer.serialize_json(workflow)
with open(filename, 'w') as dump:
with open(filename, "w") as dump:
dump.write(state)
elif selected == 'exit':
elif selected == "exit":
exit()
elif selected != '':
elif selected != "":
next_task = options[selected]
if isinstance(next_task.task_spec, UserTask):
complete_user_task(next_task)
@@ -156,18 +164,34 @@ def run(workflow, step):
if step:
print_state(workflow)
print('\nWorkflow Data')
print(json.dumps(workflow.data, indent=2, separators=[', ', ': ']))
print("\nWorkflow Data")
print(json.dumps(workflow.data, indent=2, separators=[", ", ": "]))
if __name__ == '__main__':
if __name__ == "__main__":
parser = argparse.ArgumentParser('Simple BPMN runner')
parser.add_argument('-p', '--process', dest='process', help='The top-level BPMN Process ID')
parser.add_argument('-b', '--bpmn', dest='bpmn', nargs='+', help='BPMN files to load')
parser.add_argument('-d', '--dmn', dest='dmn', nargs='*', help='DMN files to load')
parser.add_argument('-r', '--restore', dest='restore', metavar='FILE', help='Restore state from %(metavar)s')
parser.add_argument('-s', '--step', dest='step', action='store_true', help='Display state after each step')
parser = argparse.ArgumentParser("Simple BPMN runner")
parser.add_argument(
"-p", "--process", dest="process", help="The top-level BPMN Process ID"
)
parser.add_argument(
"-b", "--bpmn", dest="bpmn", nargs="+", help="BPMN files to load"
)
parser.add_argument("-d", "--dmn", dest="dmn", nargs="*", help="DMN files to load")
parser.add_argument(
"-r",
"--restore",
dest="restore",
metavar="FILE",
help="Restore state from %(metavar)s",
)
parser.add_argument(
"-s",
"--step",
dest="step",
action="store_true",
help="Display state after each step",
)
args = parser.parse_args()
try:
@@ -177,6 +201,6 @@ if __name__ == '__main__':
else:
wf = parse(args.process, args.bpmn, args.dmn)
run(wf, args.step)
except Exception as exc:
except Exception:
sys.stderr.write(traceback.format_exc())
sys.exit(1)
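
For reference, typical invocations of the runner above might look like this (the script and file names are hypothetical; only the flags come from the argument parser in the diff):

# run a process interactively, printing state after each step
python spiff_workflow_connector.py -p my_process -b process.bpmn -d decisions.dmn -s

# resume from a previously dumped workflow state
python spiff_workflow_connector.py -r saved_state.json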