wip: attempting to create process instance w/ burnettk

commit bb530d1744 (parent 797116a727)
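What follows is the commit's diff, file by file (`-` removed, `+` added). The running theme is a vocabulary change: the crc-derived "workflow"/"study" naming becomes "process instance"/"process model" throughout the backend. The first hunks touch the Alembic `migrations/env.py`, which appears to have been regenerated from the stock flask-migrate template (single-quoted strings) in place of the previously black-formatted copy.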
@@ -1,9 +1,12 @@
+from __future__ import with_statement
+
 import logging
 from logging.config import fileConfig
 
-from alembic import context
 from flask import current_app
 
+from alembic import context
+
 # this is the Alembic Config object, which provides
 # access to the values within the .ini file in use.
 config = context.config
@@ -11,17 +14,17 @@ config = context.config
 # Interpret the config file for Python logging.
 # This line sets up loggers basically.
 fileConfig(config.config_file_name)
-logger = logging.getLogger("alembic.env")
+logger = logging.getLogger('alembic.env')
 
 # add your model's MetaData object here
 # for 'autogenerate' support
 # from myapp import mymodel
 # target_metadata = mymodel.Base.metadata
 config.set_main_option(
-    "sqlalchemy.url",
-    str(current_app.extensions["migrate"].db.get_engine().url).replace("%", "%%"),
-)
-target_metadata = current_app.extensions["migrate"].db.metadata
+    'sqlalchemy.url',
+    str(current_app.extensions['migrate'].db.get_engine().url).replace(
+        '%', '%%'))
+target_metadata = current_app.extensions['migrate'].db.metadata
 
 # other values from the config, defined by the needs of env.py,
 # can be acquired:
@@ -42,7 +45,9 @@ def run_migrations_offline():
 
     """
    url = config.get_main_option("sqlalchemy.url")
-    context.configure(url=url, target_metadata=target_metadata, literal_binds=True)
+    context.configure(
+        url=url, target_metadata=target_metadata, literal_binds=True
+    )
 
    with context.begin_transaction():
        context.run_migrations()
@@ -60,20 +65,20 @@ def run_migrations_online():
     # when there are no changes to the schema
     # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
     def process_revision_directives(context, revision, directives):
-        if getattr(config.cmd_opts, "autogenerate", False):
+        if getattr(config.cmd_opts, 'autogenerate', False):
             script = directives[0]
             if script.upgrade_ops.is_empty():
                 directives[:] = []
-                logger.info("No changes in schema detected.")
+                logger.info('No changes in schema detected.')
 
-    connectable = current_app.extensions["migrate"].db.get_engine()
+    connectable = current_app.extensions['migrate'].db.get_engine()
 
     with connectable.connect() as connection:
         context.configure(
             connection=connection,
             target_metadata=target_metadata,
             process_revision_directives=process_revision_directives,
-            **current_app.extensions["migrate"].configure_args
+            **current_app.extensions['migrate'].configure_args
         )
 
         with context.begin_transaction():
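The next two hunks swap out the database migration wholesale: the autogenerated revision `0b87e01fcf6a` from 2022-05-23 is deleted, and a fresh root revision `dec9751e0e3d` (further below) takes its place. Notable differences: the `process_group`, `process_model`, `principal`, `permission_target`, and `permission_assignment` tables are gone, while `admin_session` and the crc-style `file`, `task_event`, and `data_store` tables come in.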
@@ -1,112 +0,0 @@
-"""empty message
-
-Revision ID: 0b87e01fcf6a
-Revises:
-Create Date: 2022-05-23 17:18:02.263955
-
-"""
-from alembic import op
-import sqlalchemy as sa
-
-
-# revision identifiers, used by Alembic.
-revision = '0b87e01fcf6a'
-down_revision = None
-branch_labels = None
-depends_on = None
-
-
-def upgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.create_table('group',
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('name', sa.String(length=255), nullable=True),
-    sa.Column('new_name_two', sa.String(length=255), nullable=True),
-    sa.PrimaryKeyConstraint('id')
-    )
-    op.create_table('process_group',
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('name', sa.String(length=50), nullable=True),
-    sa.PrimaryKeyConstraint('id')
-    )
-    op.create_table('user',
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('username', sa.String(length=50), nullable=False),
-    sa.Column('name', sa.String(length=50), nullable=True),
-    sa.Column('email', sa.String(length=50), nullable=True),
-    sa.PrimaryKeyConstraint('id'),
-    sa.UniqueConstraint('username')
-    )
-    op.create_table('principal',
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('user_id', sa.Integer(), nullable=True),
-    sa.Column('group_id', sa.Integer(), nullable=True),
-    sa.CheckConstraint('NOT(user_id IS NULL AND group_id IS NULL)'),
-    sa.ForeignKeyConstraint(['group_id'], ['group.id'], ),
-    sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
-    sa.PrimaryKeyConstraint('id')
-    )
-    op.create_table('process_model',
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('process_group_id', sa.Integer(), nullable=False),
-    sa.Column('version', sa.Integer(), nullable=False),
-    sa.Column('name', sa.String(length=50), nullable=True),
-    sa.ForeignKeyConstraint(['process_group_id'], ['process_group.id'], ),
-    sa.PrimaryKeyConstraint('id')
-    )
-    op.create_table('user_group_assignment',
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('user_id', sa.Integer(), nullable=False),
-    sa.Column('group_id', sa.Integer(), nullable=False),
-    sa.ForeignKeyConstraint(['group_id'], ['group.id'], ),
-    sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
-    sa.PrimaryKeyConstraint('id'),
-    sa.UniqueConstraint('user_id', 'group_id', name='user_group_assignment_unique')
-    )
-    op.create_table('process_instance',
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('process_model_id', sa.Integer(), nullable=False),
-    sa.Column('bpmn_json', sa.JSON(), nullable=True),
-    sa.Column('start_in_seconds', sa.Integer(), nullable=True),
-    sa.Column('end_in_seconds', sa.Integer(), nullable=True),
-    sa.Column('process_initiator_id', sa.Integer(), nullable=False),
-    sa.ForeignKeyConstraint(['process_initiator_id'], ['user.id'], ),
-    sa.ForeignKeyConstraint(['process_model_id'], ['process_model.id'], ),
-    sa.PrimaryKeyConstraint('id')
-    )
-    op.create_table('permission_target',
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('process_group_id', sa.Integer(), nullable=True),
-    sa.Column('process_model_id', sa.Integer(), nullable=True),
-    sa.Column('process_instance_id', sa.Integer(), nullable=True),
-    sa.CheckConstraint('NOT(process_group_id IS NULL AND process_model_id IS NULL AND process_instance_id IS NULL)'),
-    sa.ForeignKeyConstraint(['process_group_id'], ['process_group.id'], ),
-    sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
-    sa.ForeignKeyConstraint(['process_model_id'], ['process_model.id'], ),
-    sa.PrimaryKeyConstraint('id')
-    )
-    op.create_table('permission_assignment',
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('principal_id', sa.Integer(), nullable=False),
-    sa.Column('permission_target_id', sa.Integer(), nullable=False),
-    sa.Column('grant_type', sa.Enum('grant', 'deny', name='grantdeny'), nullable=True),
-    sa.Column('permission', sa.Enum('instantiate', 'administer', 'view_instance', name='permission'), nullable=True),
-    sa.ForeignKeyConstraint(['permission_target_id'], ['permission_target.id'], ),
-    sa.ForeignKeyConstraint(['principal_id'], ['principal.id'], ),
-    sa.PrimaryKeyConstraint('id')
-    )
-    # ### end Alembic commands ###
-
-
-def downgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_table('permission_assignment')
-    op.drop_table('permission_target')
-    op.drop_table('process_instance')
-    op.drop_table('user_group_assignment')
-    op.drop_table('process_model')
-    op.drop_table('principal')
-    op.drop_table('user')
-    op.drop_table('process_group')
-    op.drop_table('group')
-    # ### end Alembic commands ###
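In the replacement revision below, `process_instance` no longer carries a `process_model_id` foreign key; it stores an indexed `process_model_identifier` string plus a status enum. A minimal sketch of the model the new schema implies — this is a hypothetical reconstruction from the migration, not code in the commit, and `flask_sqlalchemy` stands in for the project's `flask_bpmn.models.db`:

```python
import enum

from flask_sqlalchemy import SQLAlchemy  # stand-in for flask_bpmn.models.db

db = SQLAlchemy()


class ProcessInstanceStatus(enum.Enum):
    not_started = "not_started"
    user_input_required = "user_input_required"
    waiting = "waiting"
    complete = "complete"
    erroring = "erroring"


class ProcessInstanceModel(db.Model):  # hypothetical, inferred from the migration below
    __tablename__ = "process_instance"
    id = db.Column(db.Integer, primary_key=True)
    process_model_identifier = db.Column(db.String, nullable=False, index=True)
    bpmn_json = db.Column(db.JSON)
    start_in_seconds = db.Column(db.Integer)
    end_in_seconds = db.Column(db.Integer)
    process_initiator_id = db.Column(db.Integer, db.ForeignKey("user.id"), nullable=False)
    status = db.Column(db.Enum(ProcessInstanceStatus))
```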
@@ -0,0 +1,132 @@
+"""empty message
+
+Revision ID: dec9751e0e3d
+Revises:
+Create Date: 2022-05-26 16:18:46.472592
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = 'dec9751e0e3d'
+down_revision = None
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('admin_session',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('token', sa.String(length=50), nullable=True),
+    sa.Column('admin_impersonate_uid', sa.String(length=50), nullable=True),
+    sa.PrimaryKeyConstraint('id'),
+    sa.UniqueConstraint('token')
+    )
+    op.create_table('group',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('name', sa.String(length=255), nullable=True),
+    sa.Column('new_name_two', sa.String(length=255), nullable=True),
+    sa.PrimaryKeyConstraint('id')
+    )
+    op.create_table('user',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('username', sa.String(length=50), nullable=False),
+    sa.Column('uid', sa.String(length=50), nullable=True),
+    sa.Column('name', sa.String(length=50), nullable=True),
+    sa.Column('email', sa.String(length=50), nullable=True),
+    sa.PrimaryKeyConstraint('id'),
+    sa.UniqueConstraint('uid'),
+    sa.UniqueConstraint('username')
+    )
+    op.create_table('process_instance',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('process_model_identifier', sa.String(), nullable=False),
+    sa.Column('bpmn_json', sa.JSON(), nullable=True),
+    sa.Column('start_in_seconds', sa.Integer(), nullable=True),
+    sa.Column('end_in_seconds', sa.Integer(), nullable=True),
+    sa.Column('process_initiator_id', sa.Integer(), nullable=False),
+    sa.Column('status', sa.Enum('not_started', 'user_input_required', 'waiting', 'complete', 'erroring', name='processinstancestatus'), nullable=True),
+    sa.ForeignKeyConstraint(['process_initiator_id'], ['user.id'], ),
+    sa.PrimaryKeyConstraint('id')
+    )
+    op.create_index(op.f('ix_process_instance_process_model_identifier'), 'process_instance', ['process_model_identifier'], unique=False)
+    op.create_table('user_group_assignment',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('user_id', sa.Integer(), nullable=False),
+    sa.Column('group_id', sa.Integer(), nullable=False),
+    sa.ForeignKeyConstraint(['group_id'], ['group.id'], ),
+    sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
+    sa.PrimaryKeyConstraint('id'),
+    sa.UniqueConstraint('user_id', 'group_id', name='user_group_assignment_unique')
+    )
+    op.create_table('file',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('name', sa.String(), nullable=False),
+    sa.Column('type', sa.String(), nullable=False),
+    sa.Column('content_type', sa.String(), nullable=False),
+    sa.Column('process_instance_id', sa.Integer(), nullable=True),
+    sa.Column('task_spec', sa.String(), nullable=True),
+    sa.Column('irb_doc_code', sa.String(), nullable=False),
+    sa.Column('md5_hash', sa.String(), nullable=False),
+    sa.Column('data', sa.LargeBinary(), nullable=True),
+    sa.Column('size', sa.Integer(), nullable=True),
+    sa.Column('date_modified', sa.DateTime(timezone=True), nullable=True),
+    sa.Column('date_created', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
+    sa.Column('user_uid', sa.String(), nullable=True),
+    sa.Column('archived', sa.Boolean(), nullable=True),
+    sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
+    sa.ForeignKeyConstraint(['user_uid'], ['user.uid'], ),
+    sa.PrimaryKeyConstraint('id')
+    )
+    op.create_table('task_event',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('user_uid', sa.String(), nullable=False),
+    sa.Column('process_instance_id', sa.Integer(), nullable=False),
+    sa.Column('spec_version', sa.String(), nullable=True),
+    sa.Column('action', sa.String(), nullable=True),
+    sa.Column('task_id', sa.String(), nullable=True),
+    sa.Column('task_name', sa.String(), nullable=True),
+    sa.Column('task_title', sa.String(), nullable=True),
+    sa.Column('task_type', sa.String(), nullable=True),
+    sa.Column('task_state', sa.String(), nullable=True),
+    sa.Column('task_lane', sa.String(), nullable=True),
+    sa.Column('form_data', sa.JSON(), nullable=True),
+    sa.Column('mi_type', sa.String(), nullable=True),
+    sa.Column('mi_count', sa.Integer(), nullable=True),
+    sa.Column('mi_index', sa.Integer(), nullable=True),
+    sa.Column('process_name', sa.String(), nullable=True),
+    sa.Column('date', sa.DateTime(timezone=True), nullable=True),
+    sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
+    sa.PrimaryKeyConstraint('id')
+    )
+    op.create_table('data_store',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('last_updated', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
+    sa.Column('key', sa.String(), nullable=False),
+    sa.Column('process_instance_id', sa.Integer(), nullable=True),
+    sa.Column('task_spec', sa.String(), nullable=True),
+    sa.Column('spec_id', sa.String(), nullable=True),
+    sa.Column('user_id', sa.String(), nullable=True),
+    sa.Column('file_id', sa.Integer(), nullable=True),
+    sa.Column('value', sa.String(), nullable=True),
+    sa.ForeignKeyConstraint(['file_id'], ['file.id'], ),
+    sa.PrimaryKeyConstraint('id')
+    )
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_table('data_store')
+    op.drop_table('task_event')
+    op.drop_table('file')
+    op.drop_table('user_group_assignment')
+    op.drop_index(op.f('ix_process_instance_process_model_identifier'), table_name='process_instance')
+    op.drop_table('process_instance')
+    op.drop_table('user')
+    op.drop_table('group')
+    op.drop_table('admin_session')
+    # ### end Alembic commands ###
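The lockfile and `pyproject.toml` hunks that follow add two dependencies: `marshmallow-enum` (serializing enums like the new process-instance status) and `marshmallow-sqlalchemy` (which provides the `SQLAlchemyAutoSchema` that `TaskEventModelSchema` extends). A quick hedged illustration of what `marshmallow-enum` buys you — this usage is hypothetical; the commit only adds the package:

```python
from enum import Enum

from marshmallow import Schema
from marshmallow_enum import EnumField  # provided by the marshmallow-enum package added here


class Status(Enum):
    not_started = "not_started"
    complete = "complete"


class StatusSchema(Schema):
    status = EnumField(Status)  # dumps the enum member by name


print(StatusSchema().dump({"status": Status.complete}))  # {'status': 'complete'}
```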
@@ -997,6 +997,35 @@ docs = ["sphinx (==4.4.0)", "sphinx-issues (==3.0.1)", "alabaster (==0.7.12)", "
 lint = ["mypy (==0.940)", "flake8 (==4.0.1)", "flake8-bugbear (==22.1.11)", "pre-commit (>=2.4,<3.0)"]
 tests = ["pytest", "pytz", "simplejson"]
 
+[[package]]
+name = "marshmallow-enum"
+version = "1.5.1"
+description = "Enum field for Marshmallow"
+category = "main"
+optional = false
+python-versions = "*"
+
+[package.dependencies]
+marshmallow = ">=2.0.0"
+
+[[package]]
+name = "marshmallow-sqlalchemy"
+version = "0.28.0"
+description = "SQLAlchemy integration with the marshmallow (de)serialization library"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+
+[package.dependencies]
+marshmallow = ">=3.0.0"
+SQLAlchemy = ">=1.3.0"
+
+[package.extras]
+dev = ["pytest", "pytest-lazy-fixture (>=0.6.2)", "flake8 (==4.0.1)", "flake8-bugbear (==22.1.11)", "pre-commit (>=2.0,<3.0)", "tox"]
+docs = ["sphinx (==4.4.0)", "alabaster (==0.7.12)", "sphinx-issues (==3.0.1)"]
+lint = ["flake8 (==4.0.1)", "flake8-bugbear (==22.1.11)", "pre-commit (>=2.0,<3.0)"]
+tests = ["pytest", "pytest-lazy-fixture (>=0.6.2)"]
+
 [[package]]
 name = "mccabe"
 version = "0.6.1"
@@ -1948,7 +1977,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-
 [metadata]
 lock-version = "1.1"
 python-versions = "^3.8"
-content-hash = "d0f2edc038a129a994d9b2d2fd9a30df000450d062ef183316197f54775fce5a"
+content-hash = "499cc3206c9880d05e18e6ed6c156759b242e75ee4c979c86ea99adeb2d99153"
 
 [metadata.files]
 alabaster = [
@@ -2577,6 +2606,14 @@ marshmallow = [
     {file = "marshmallow-3.15.0-py3-none-any.whl", hash = "sha256:ff79885ed43b579782f48c251d262e062bce49c65c52412458769a4fb57ac30f"},
     {file = "marshmallow-3.15.0.tar.gz", hash = "sha256:2aaaab4f01ef4f5a011a21319af9fce17ab13bf28a026d1252adab0e035648d5"},
 ]
+marshmallow-enum = [
+    {file = "marshmallow-enum-1.5.1.tar.gz", hash = "sha256:38e697e11f45a8e64b4a1e664000897c659b60aa57bfa18d44e226a9920b6e58"},
+    {file = "marshmallow_enum-1.5.1-py2.py3-none-any.whl", hash = "sha256:57161ab3dbfde4f57adeb12090f39592e992b9c86d206d02f6bd03ebec60f072"},
+]
+marshmallow-sqlalchemy = [
+    {file = "marshmallow-sqlalchemy-0.28.0.tar.gz", hash = "sha256:fb6b06686f38fec2ea0ec53a5ee4979219409e2b2260f9bc91e4b43105d19782"},
+    {file = "marshmallow_sqlalchemy-0.28.0-py2.py3-none-any.whl", hash = "sha256:f1b977c323ac0ccc0456b15d4eb9bff413b92c72d7a165f263dc276dd3782cf4"},
+]
 mccabe = [
     {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
     {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"},
@@ -39,6 +39,8 @@ psycopg2 = "^2.9.3"
 typing-extensions = "^4.2.0"
 connexion = "^2.13.1"
 lxml = "^4.8.0"
+marshmallow-enum = "^1.5.1"
+marshmallow-sqlalchemy = "^0.28.0"
 
 
 [tool.poetry.dev-dependencies]
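The next hunks edit the OpenAPI spec: the global `security` block is switched on, and the create-workflow POST now points at the new `create_process_instance` route instead of crc's `get_workflow_from_spec`. Connexion resolves `x-bearerInfoFunc` to a callable that validates each bearer token. A minimal sketch of that contract, assuming connexion's standard behavior (the commit's real implementation is `verify_token` in the new `routes/user.py` below):

```python
# Hypothetical minimal bearer handler; name and token check are illustrative only.
from typing import Optional


def bearer_info(token: str) -> Optional[dict]:
    """Connexion calls this with the value after 'Bearer ' in the Authorization header."""
    if token == "valid-token":  # placeholder check, for illustration
        return {"sub": "some-user-uid"}  # returning a dict marks the request authenticated
    return None  # None (or an exception) rejects the request with a 401
```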
@@ -6,8 +6,10 @@ info:
     name: MIT
 servers:
   - url: http://localhost:5000/v1.0
 # security:
 #   - jwt: ['secret']
+security:
+  - jwt: ['secret']
 
 
 paths:
   /workflow-specification:
     # get:
@@ -118,7 +120,7 @@ paths:
 #            schema:
 #              $ref: "#/components/schemas/WorkflowSpec"
     post:
-      operationId: crc.api.workflow.get_workflow_from_spec
+      operationId: spiff_workflow_webapp.routes.process_api_blueprint.create_process_instance
       summary: Creates a workflow from a workflow spec and returns the workflow
       tags:
         - Workflow Specifications
@@ -219,10 +221,13 @@ paths:
 #          '204':
 #            description: The file was removed.
 
+
+
+
 components:
   securitySchemes:
     jwt:
       type: http
       scheme: bearer
       bearerFormat: JWT
       x-bearerInfoFunc: spiff_workflow_webapp.routes.user.verify_token
   schemas:
     User:
       properties:
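The remaining hunks carry the workflow-to-process_instance rename through the SQLAlchemy models (`data_store`, `file`, `task_event`, `user`, `user_group_assignment`), the API blueprint, the script helpers, and the processor service: `workflow_id` columns become `process_instance_id`, `WorkflowModel` type hints become `ProcessInstanceModel`, and the `study_id` concept disappears.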
@@ -12,7 +12,7 @@ class DataStoreModel(db.Model):
     id = db.Column(db.Integer, primary_key=True)
     last_updated = db.Column(db.DateTime(timezone=True), server_default=func.now())
     key = db.Column(db.String, nullable=False)
-    workflow_id = db.Column(db.Integer)
+    process_instance_id = db.Column(db.Integer)
     task_spec = db.Column(db.String)
     spec_id = db.Column(db.String)
     user_id = db.Column(db.String, nullable=True)
@@ -17,7 +17,7 @@ class FileModel(db.Model):
     name = db.Column(db.String, nullable=False)
     type = db.Column(db.String, nullable=False)
     content_type = db.Column(db.String, nullable=False)
-    workflow_id = db.Column(db.Integer, db.ForeignKey('workflow.id'), nullable=True)
+    process_instance_id = db.Column(db.Integer, db.ForeignKey('process_instance.id'), nullable=True)
     task_spec = db.Column(db.String, nullable=True)
     irb_doc_code = db.Column(db.String, nullable=False)  # Code reference to the documents.xlsx reference file.
     data_stores = relationship(DataStoreModel, cascade="all,delete", backref="file")
@@ -86,7 +86,7 @@ class File(object):
         self.content_type = None
         self.name = None
         self.content_type = None
-        self.workflow_id = None
+        self.process_instance_id = None
         self.irb_doc_code = None
         self.type = None
         self.document = {}
@@ -117,7 +117,7 @@ class FileSchema(Schema):
     class Meta:
         """Meta."""
 
         model = File
-        fields = ["id", "name", "content_type", "workflow_id",
+        fields = ["id", "name", "content_type", "process_instance_id",
                   "irb_doc_code", "last_modified", "type", "archived",
                   "size", "data_store", "document", "user_uid", "url"]
         unknown = INCLUDE
@@ -1,13 +1,13 @@
 """Task_event."""
 
 from __future__ import annotations
 
 import enum
 
 from marshmallow import INCLUDE, fields, Schema
 from marshmallow_sqlalchemy import SQLAlchemyAutoSchema
 
 from flask_bpmn.models.db import db
-from spiff_workflow_webapp.models.process_instance import ProcessInstanceMetadataSchema
-from crc.models.workflow import WorkflowModel
-from crc.services.ldap_service import LdapService
 from sqlalchemy import func
@@ -55,12 +55,11 @@ class TaskEventModelSchema(SQLAlchemyAutoSchema):
 class TaskEvent(object):
     """TaskEvent."""
 
-    def __init__(self, model: TaskEventModel, process_instance: WorkflowModel):
+    def __init__(self, model: TaskEventModel, process_instance: "ProcessInstanceModel"):
         """__init__."""
         self.id = model.id
         self.process_instance = process_instance
         self.user_uid = model.user_uid
-        self.user_display = LdapService.user_info(model.user_uid).display_name
         self.action = model.action
         self.task_id = model.task_id
         self.task_title = model.task_title
@@ -74,12 +73,12 @@ class TaskEvent(object):
 class TaskEventSchema(Schema):
     """TaskEventSchema."""
 
-    process_instance = fields.Nested(ProcessInstanceMetadataSchema, dump_only=True)
+    process_instance = fields.Nested("ProcessInstanceMetadataSchema", dump_only=True)
     task_lane = fields.String(allow_none=True, required=False)
 
     class Meta:
         """Meta."""
 
         model = TaskEvent
-        additional = ["id", "user_uid", "user_display", "action", "task_id", "task_title",
+        additional = ["id", "user_uid", "action", "task_id", "task_title",
                       "task_name", "task_type", "task_state", "task_lane", "date"]
         unknown = INCLUDE
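In the user model below, `UserModelSchema` drops `SQLAlchemyAutoSchema` in favor of a hand-rolled `marshmallow.Schema` with just `id` and `username` — presumably because the auto-generated schema was pulling relationship fields (the old options are left commented out, including an `exclude` attempt on `UserGroupAssignment`). The practical effect, sketched:

```python
import marshmallow
from marshmallow import Schema


class UserModelSchema(Schema):
    """Explicit schema: only the declared fields are dumped."""

    id = marshmallow.fields.String(required=True)
    username = marshmallow.fields.String(required=True)


# Any object or dict carrying id/username now serializes to a flat dict,
# with no relationship traversal:
print(UserModelSchema().dump({"id": "1", "username": "ciuser"}))
# -> {'id': '1', 'username': 'ciuser'}
```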
@@ -1,8 +1,12 @@
 """User."""
-from marshmallow_sqlalchemy import SQLAlchemyAutoSchema
+from marshmallow import Schema
+import marshmallow
 from flask_bpmn.models.db import db
 from sqlalchemy.orm import relationship  # type: ignore
 
+from spiff_workflow_webapp.models.user_group_assignment import UserGroupAssignmentModel
+from spiff_workflow_webapp.models.group import GroupModel
+
 
 class UserModel(db.Model):  # type: ignore
     """UserModel."""
@@ -10,6 +14,7 @@ class UserModel(db.Model):  # type: ignore
     __tablename__ = "user"
     id = db.Column(db.Integer, primary_key=True)
     username = db.Column(db.String(50), nullable=False, unique=True)
+    uid = db.Column(db.String(50), unique=True)
     name = db.Column(db.String(50))
     email = db.Column(db.String(50))
     user_group_assignments = relationship("UserGroupAssignmentModel", cascade="delete")
@@ -21,10 +26,20 @@ class UserModel(db.Model):  # type: ignore
     )
 
 
-class UserModelSchema(SQLAlchemyAutoSchema):
+class UserModelSchema(Schema):
     """UserModelSchema."""
 
     class Meta:
         """Meta."""
 
         model = UserModel
-        load_instance = True
-        include_relationships = True
+        # load_instance = True
+        # include_relationships = False
+        # exclude = ("UserGroupAssignment",)
+
+    id = marshmallow.fields.String(required=True)
+    username = marshmallow.fields.String(required=True)
+
+
+class AdminSessionModel(db.Model):
+    __tablename__ = 'admin_session'
+    id = db.Column(db.Integer, primary_key=True)
+    token = db.Column(db.String(50), unique=True)
+    admin_impersonate_uid = db.Column(db.String(50))
@@ -3,9 +3,6 @@ from flask_bpmn.models.db import db
 from sqlalchemy import ForeignKey  # type: ignore
 from sqlalchemy.orm import relationship  # type: ignore
 
-from spiff_workflow_webapp.models.group import GroupModel
-from spiff_workflow_webapp.models.user import UserModel
-
 
 class UserGroupAssignmentModel(db.Model):  # type: ignore
     """UserGroupAssignmentModel."""
@@ -15,7 +12,7 @@ class UserGroupAssignmentModel(db.Model):  # type: ignore
         db.UniqueConstraint("user_id", "group_id", name="user_group_assignment_unique"),
     )
     id = db.Column(db.Integer, primary_key=True)
-    user_id = db.Column(ForeignKey(UserModel.id), nullable=False)
-    group_id = db.Column(ForeignKey(GroupModel.id), nullable=False)
+    user_id = db.Column(ForeignKey("user.id"), nullable=False)
+    group_id = db.Column(ForeignKey("group.id"), nullable=False)
     group = relationship("GroupModel", overlaps="groups,user_group_assignments,users")
     user = relationship("UserModel", overlaps="groups,user_group_assignments,users")
@@ -13,6 +13,7 @@ from spiff_workflow_webapp.models.process_model import ProcessModelInfoSchema
 from spiff_workflow_webapp.models.process_instance import ProcessInstanceApiSchema
 from spiff_workflow_webapp.services.process_model_service import ProcessModelService
 from spiff_workflow_webapp.services.process_instance_service import ProcessInstanceService
+from spiff_workflow_webapp.services.process_instance_processor import ProcessInstanceProcessor
 from spiff_workflow_webapp.services.spec_file_service import SpecFileService
 from spiff_workflow_webapp.models.file import FileSchema, FileType
@@ -58,8 +59,8 @@ def add_file(spec_id):
 
 def create_process_instance(spec_id):
     """Create_process_instance."""
-    workflow_model = ProcessInstanceService.create_process_instance(spec_id, g.user)
-    processor = WorkflowProcessor(workflow_model)
+    process_instance = ProcessInstanceService.create_process_instance(spec_id, g.user)
+    processor = ProcessInstanceProcessor(process_instance)
 
     processor.do_engine_steps()
     processor.save()
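The next hunk adds a brand-new module — `spiff_workflow_webapp/routes/user.py`, judging by the `x-bearerInfoFunc` path in the API spec above — ported from crc's `api/user.py`. It backs the `jwt` security scheme and sets `g.user` so handlers such as `create_process_instance` can read the current user.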
@@ -0,0 +1,78 @@
+import flask
+from flask import g, request, current_app
+
+from flask_bpmn.api.api_error import ApiError
+from spiff_workflow_webapp.models.user import UserModel
+
+"""
+.. module:: crc.api.user
+   :synopsis: Single Sign On (SSO) user login and session handlers
+"""
+
+
+def verify_token(token=None):
+    """
+    Verifies the token for the user (if provided). If in production environment and token is not provided,
+    gets user from the SSO headers and returns their token.
+
+    Args:
+        token: Optional[str]
+
+    Returns:
+        token: str
+
+    Raises:
+        ApiError.  If not on production and token is not valid, returns an 'invalid_token' 403 error.
+        If on production and user is not authenticated, returns a 'no_user' 403 error.
+    """
+    failure_error = ApiError("invalid_token", "Unable to decode the token you provided. Please re-authenticate",
+                             status_code=403)
+
+    if token:
+        try:
+            token_info = UserModel.decode_auth_token(token)
+            g.user = UserModel.query.filter_by(uid=token_info['sub']).first()
+
+            # If the user is valid, store the token for this session
+            if g.user:
+                g.token = token
+        except:
+            raise failure_error
+        if g.user is not None:
+            return token_info
+        else:
+            raise failure_error
+
+    # If there's no token and we're in production, get the user from the SSO headers and return their token
+    elif _is_production():
+        uid = "TEST_UID"
+
+        if uid is not None:
+            db_user = UserModel.query.filter_by(uid=uid).first()
+
+            # If the user is valid, store the user and token for this session
+            if db_user is not None:
+                g.user = db_user
+                token = g.user.encode_auth_token()
+                g.token = token
+                token_info = UserModel.decode_auth_token(token)
+                return token_info
+
+            else:
+                raise ApiError("no_user",
+                               "User not found. Please login via the frontend app before accessing this feature.",
+                               status_code=403)
+
+    else:
+        # Fall back to a default user if this is not production.
+        g.user = UserModel.query.first()
+        if not g.user:
+            raise ApiError("no_user", "You are in development mode, but there are no users in the database. Add one, and it will use it.")
+        token = g.user.encode_auth_token()
+        token_info = UserModel.decode_auth_token(token)
+        return token_info
+
+
+def _is_production():
+    return 'PRODUCTION' in current_app.config and current_app.config['PRODUCTION']
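`UserModel.encode_auth_token` and `decode_auth_token` are referenced above but are not part of this diff. A plausible PyJWT-based sketch of that pair — every name and claim here is an assumption, not the commit's code:

```python
# Assumed shape of the UserModel token helpers referenced by verify_token (not in this diff).
from datetime import datetime, timedelta

import jwt  # PyJWT

SECRET_KEY = "change-me"  # would come from app config in practice


def encode_auth_token(uid: str) -> str:
    """Issue a signed JWT whose 'sub' claim verify_token later reads."""
    payload = {
        "sub": uid,
        "exp": datetime.utcnow() + timedelta(hours=1),
    }
    return jwt.encode(payload, SECRET_KEY, algorithm="HS256")


def decode_auth_token(token: str) -> dict:
    """Raises jwt.PyJWTError on bad or expired tokens, mirroring the except branch above."""
    return jwt.decode(token, SECRET_KEY, algorithms=["HS256"])
```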
@@ -21,13 +21,13 @@ class Script(object):
         raise ApiError("invalid_script",
                        "This script does not supply a description.")
 
-    def do_task(self, task, study_id, workflow_id, *args, **kwargs):
+    def do_task(self, task, workflow_id, *args, **kwargs):
         """Do_task."""
         raise ApiError("invalid_script",
                        "This is an internal error. The script you are trying to execute '%s' " % self.__class__.__name__
                        + "does not properly implement the do_task function.")
 
-    def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs):
+    def do_task_validate_only(self, task, workflow_id, *args, **kwargs):
         """Do_task_validate_only."""
         raise ApiError("invalid_script",
                        "This is an internal error. The script you are trying to execute '%s' " % self.__class__.__name__
@@ -35,7 +35,7 @@ class Script(object):
                        + "but does not make external calls or database updates.")
 
     @staticmethod
-    def generate_augmented_list(task, study_id, workflow_id):
+    def generate_augmented_list(task, workflow_id):
         """This makes a dictionary of lambda functions that are closed over the class instance that they represent.
 
         This is passed into PythonScriptParser as a list of helper functions that are
@@ -45,7 +45,7 @@ class Script(object):
         We may be able to remove the task for each of these calls if we are not using it other than potentially
         updating the task data.
         """
-        def make_closure(subclass, task, study_id, workflow_id):
+        def make_closure(subclass, task, workflow_id):
             """Yes - this is black magic.
 
             Essentially, we want to build a list of all of the submodules (i.e. email, user_data_get, etc)
@@ -56,17 +56,17 @@ class Script(object):
             that we created.
             """
             instance = subclass()
-            return lambda *ar, **kw: subclass.do_task(instance, task, study_id, workflow_id, *ar, **kw)
+            return lambda *ar, **kw: subclass.do_task(instance, task, workflow_id, *ar, **kw)
         execlist = {}
         subclasses = Script.get_all_subclasses()
         for x in range(len(subclasses)):
             subclass = subclasses[x]
-            execlist[subclass.__module__.split('.')[-1]] = make_closure(subclass, task, study_id,
+            execlist[subclass.__module__.split('.')[-1]] = make_closure(subclass, task,
                                                                         workflow_id)
         return execlist
 
     @staticmethod
-    def generate_augmented_validate_list(task, study_id, workflow_id):
+    def generate_augmented_validate_list(task, workflow_id):
         """This makes a dictionary of lambda functions that are closed over the class instance that they represent.
 
         This is passed into PythonScriptParser as a list of helper functions that are
@@ -76,15 +76,15 @@ class Script(object):
         We may be able to remove the task for each of these calls if we are not using it other than potentially
         updating the task data.
         """
-        def make_closure_validate(subclass, task, study_id, workflow_id):
+        def make_closure_validate(subclass, task, workflow_id):
             """Make_closure_validate."""
             instance = subclass()
-            return lambda *a, **b: subclass.do_task_validate_only(instance, task, study_id, workflow_id, *a, **b)
+            return lambda *a, **b: subclass.do_task_validate_only(instance, task, workflow_id, *a, **b)
         execlist = {}
         subclasses = Script.get_all_subclasses()
         for x in range(len(subclasses)):
             subclass = subclasses[x]
-            execlist[subclass.__module__.split('.')[-1]] = make_closure_validate(subclass, task, study_id,
+            execlist[subclass.__module__.split('.')[-1]] = make_closure_validate(subclass, task,
                                                                                  workflow_id)
         return execlist
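The `Script` hunks above simply drop `study_id` from every signature and closure; the pattern itself is unchanged. A standalone illustration of that closure trick, simplified from the code (real subclasses come from `Script.get_all_subclasses()`):

```python
class ExampleScript:
    def do_task(self, task, workflow_id, *args, **kwargs):
        return f"ran for {workflow_id} with {args}"


def make_closure(subclass, task, workflow_id):
    instance = subclass()
    # Bind instance/task/workflow_id now; callers later supply only *args/**kwargs.
    return lambda *ar, **kw: subclass.do_task(instance, task, workflow_id, *ar, **kw)


execlist = {"example_script": make_closure(ExampleScript, task=None, workflow_id=42)}
print(execlist["example_script"]("hello"))  # ran for 42 with ('hello',)
```

The last file is the largest: the `WorkflowProcessor` service is renamed to `ProcessInstanceProcessor`, with `bpmn_workflow_json` becoming `bpmn_json`, the `STUDY_ID_KEY` removed outright, and every `bpmn_workflow` local renamed to `bpmn_process_instance`.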
@@ -27,11 +27,13 @@ from flask_bpmn.api.api_error import ApiError
 from spiff_workflow_webapp.models.file import FileModel, FileType, File
 from spiff_workflow_webapp.models.task_event import TaskEventModel, TaskAction
 from spiff_workflow_webapp.models.user import UserModelSchema
+from spiff_workflow_webapp.models.process_model import ProcessModelInfo
+from spiff_workflow_webapp.models.process_instance import ProcessInstanceModel, ProcessInstanceStatus
 from spiff_workflow_webapp.scripts.script import Script
 from spiff_workflow_webapp.services.spec_file_service import SpecFileService
 # from crc.services.user_file_service import UserFileService
-from crc.services.user_service import UserService
-from crc.services.workflow_spec_service import WorkflowSpecService
+from spiff_workflow_webapp.services.user_service import UserService
+from spiff_workflow_webapp.services.process_model_service import ProcessModelService
 
 
 class CustomBpmnScriptEngine(PythonScriptEngine):
@@ -49,17 +51,16 @@ class CustomBpmnScriptEngine(PythonScriptEngine):
         """__get_augment_methods."""
         methods = []
         if task:
-            workflow = WorkflowProcessor.find_top_level_workflow(task)
-            study_id = workflow.data[WorkflowProcessor.STUDY_ID_KEY]
-            if WorkflowProcessor.WORKFLOW_ID_KEY in workflow.data:
-                workflow_id = workflow.data[WorkflowProcessor.WORKFLOW_ID_KEY]
+            process_instance = ProcessInstanceProcessor.find_top_level_process_instance(task)
+            if ProcessInstanceProcessor.PROCESS_INSTANCE_ID_KEY in process_instance.data:
+                process_instance_id = process_instance.data[ProcessInstanceProcessor.PROCESS_INSTANCE_ID_KEY]
             else:
-                workflow_id = None
+                process_instance_id = None
 
-            if workflow.data[WorkflowProcessor.VALIDATION_PROCESS_KEY]:
-                methods = Script.generate_augmented_validate_list(task, study_id, workflow_id)
+            if process_instance.data[ProcessInstanceProcessor.VALIDATION_PROCESS_KEY]:
+                methods = Script.generate_augmented_validate_list(task, process_instance_id)
             else:
-                methods = Script.generate_augmented_list(task, study_id, workflow_id)
+                methods = Script.generate_augmented_list(task, process_instance_id)
         return methods
 
     def _evaluate(self, expression, context, task=None, external_methods=None):
@@ -91,41 +92,40 @@ class MyCustomParser(BpmnDmnParser):
     OVERRIDE_PARSER_CLASSES.update(CamundaParser.OVERRIDE_PARSER_CLASSES)
 
 
-class WorkflowProcessor(object):
-    """WorkflowProcessor."""
+class ProcessInstanceProcessor(object):
+    """ProcessInstanceProcessor."""
 
     _script_engine = CustomBpmnScriptEngine()
     SERIALIZER_VERSION = "1.0-CRC"
     wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter(
         [UserTaskConverter, BusinessRuleTaskConverter])
     _serializer = BpmnWorkflowSerializer(wf_spec_converter, version=SERIALIZER_VERSION)
     _old_serializer = BpmnSerializer()
-    WORKFLOW_ID_KEY = "workflow_id"
-    STUDY_ID_KEY = "study_id"
+    PROCESS_INSTANCE_ID_KEY = "process_instance_id"
     VALIDATION_PROCESS_KEY = "validate_only"
 
-    def __init__(self, workflow_model: WorkflowModel, validate_only=False):
-        """Create a Workflow Processor based on the serialized information available in the workflow model."""
-        self.workflow_model = workflow_model
-        self.workflow_spec_service = WorkflowSpecService()
+    def __init__(self, process_instance_model: ProcessInstanceModel, validate_only=False):
+        """Create a Workflow Processor based on the serialized information available in the process_instance model."""
+        self.process_instance_model = process_instance_model
+        self.process_model_service = ProcessModelService()
         spec = None
-        if workflow_model.bpmn_workflow_json is None:
-            spec_info = self.workflow_spec_service.get_spec(workflow_model.workflow_spec_id)
+        if process_instance_model.bpmn_json is None:
+            spec_info = self.process_model_service.get_spec(process_instance_model.process_model_id)
             if spec_info is None:
-                raise (ApiError("missing_spec", "The spec this workflow references does not currently exist."))
+                raise (ApiError("missing_spec", "The spec this process_instance references does not currently exist."))
             self.spec_files = SpecFileService.get_files(spec_info, include_libraries=True)
             spec = self.get_spec(self.spec_files, spec_info)
         else:
-            B = len(workflow_model.bpmn_workflow_json.encode('utf-8'))
+            B = len(process_instance_model.bpmn_json.encode('utf-8'))
             MB = float(1024 ** 2)
             json_size = B / MB
             if json_size > 1:
-                wf_json = json.loads(workflow_model.bpmn_workflow_json)
+                wf_json = json.loads(process_instance_model.bpmn_json)
                 if 'spec' in wf_json and 'tasks' in wf_json:
                     task_tree = wf_json['tasks']
                     test_spec = wf_json['spec']
                     task_size = "{:.2f}".format(len(json.dumps(task_tree).encode('utf-8')) / MB)
                     spec_size = "{:.2f}".format(len(json.dumps(test_spec).encode('utf-8')) / MB)
-                    message = 'Workflow ' + workflow_model.workflow_spec_id + \
+                    message = 'Workflow ' + process_instance_model.process_model_id + \
                         ' JSON Size is over 1MB:{0:.2f} MB'.format(json_size)
                     message += f"\n  Task Size: {task_size}"
                     message += f"\n  Spec Size: {spec_size}"
@@ -145,45 +145,45 @@ class WorkflowProcessor(object):
                         check_sub_specs(my_spec['spec'], indent + 5)
             check_sub_specs(test_spec, 5)
 
-        self.workflow_spec_id = workflow_model.workflow_spec_id
+        self.process_model_id = process_instance_model.process_model_id
 
         try:
-            self.bpmn_workflow = self.__get_bpmn_workflow(workflow_model, spec, validate_only)
-            self.bpmn_workflow.script_engine = self._script_engine
+            self.bpmn_process_instance = self.__get_bpmn_process_instance(process_instance_model, spec, validate_only)
+            self.bpmn_process_instance.script_engine = self._script_engine
 
-            self.add_user_info_to_workflow(self.bpmn_workflow)
+            self.add_user_info_to_process_instance(self.bpmn_process_instance)
 
-            if self.WORKFLOW_ID_KEY not in self.bpmn_workflow.data:
-                if not workflow_model.id:
-                    db.session.add(workflow_model)
-                    # If the model is new, and has no id, save it, write it into the workflow model
+            if self.PROCESS_INSTANCE_ID_KEY not in self.bpmn_process_instance.data:
+                if not process_instance_model.id:
+                    db.session.add(process_instance_model)
+                    # If the model is new, and has no id, save it, write it into the process_instance model
                     # and save it again. In this way, the workflow process is always aware of the
                     # database model to which it is associated, and scripts running within the model
                     # can then load data as needed.
-                self.bpmn_workflow.data[WorkflowProcessor.WORKFLOW_ID_KEY] = workflow_model.id
-                workflow_model.bpmn_workflow_json = WorkflowProcessor._serializer.serialize_json(self.bpmn_workflow)
+                self.bpmn_process_instance.data[ProcessInstanceProcessor.PROCESS_INSTANCE_ID_KEY] = process_instance_model.id
+                process_instance_model.bpmn_json = ProcessInstanceProcessor._serializer.serialize_json(self.bpmn_process_instance)
 
                 self.save()
 
         except MissingSpecError as ke:
-            raise ApiError(code="unexpected_workflow_structure",
-                           message="Failed to deserialize workflow"
+            raise ApiError(code="unexpected_process_instance_structure",
+                           message="Failed to deserialize process_instance"
                                    " '%s' due to a mis-placed or missing task '%s'" %
-                                   (self.workflow_spec_id, str(ke))) from ke
+                                   (self.process_model_id, str(ke))) from ke
 
     @staticmethod
-    def add_user_info_to_workflow(bpmn_workflow):
-        """Add_user_info_to_workflow."""
+    def add_user_info_to_process_instance(bpmn_process_instance):
+        """Add_user_info_to_process_instance."""
         if UserService.has_user():
             current_user = UserService.current_user(allow_admin_impersonate=True)
             current_user_data = UserModelSchema().dump(current_user)
-            tasks = bpmn_workflow.get_tasks(TaskState.READY)
+            tasks = bpmn_process_instance.get_tasks(TaskState.READY)
             for task in tasks:
                 task.data['current_user'] = current_user_data
 
     @staticmethod
-    def reset(workflow_model, clear_data=False):
-        """Resets the workflow back to an unstarted state - where nothing has happened yet.
+    def reset(process_instance_model, clear_data=False):
+        """Resets the process_instance back to an unstarted state - where nothing has happened yet.
 
         If clear_data is set to false, then the information
         previously used in forms will be re-populated when the form is re-
@@ -192,89 +192,87 @@ class WorkflowProcessor(object):
         """
         # Try to execute a cancel notify
         try:
-            bpmn_workflow = WorkflowProcessor.__get_bpmn_workflow(workflow_model)
-            WorkflowProcessor.__cancel_notify(bpmn_workflow)
+            bpmn_process_instance = ProcessInstanceProcessor.__get_bpmn_process_instance(process_instance_model)
+            ProcessInstanceProcessor.__cancel_notify(bpmn_process_instance)
         except Exception as e:
             db.session.rollback()  # in case the above left the database with a bad transaction
-            current_app.logger.error("Unable to send a cancel notify for workflow %s during a reset."
+            current_app.logger.error("Unable to send a cancel notify for process_instance %s during a reset."
                                      " Continuing with the reset anyway so we don't get in an unresolvable"
                                      " state. An %s error occured with the following information: %s" %
-                                     (workflow_model.id, e.__class__.__name__, str(e)))
-        workflow_model.bpmn_workflow_json = None
-        workflow_model.status = WorkflowStatus.not_started
+                                     (process_instance_model.id, e.__class__.__name__, str(e)))
+        process_instance_model.bpmn_json = None
+        process_instance_model.status = ProcessInstanceStatus.not_started
 
         # clear out any task assignments
         db.session.query(TaskEventModel). \
-            filter(TaskEventModel.workflow_id == workflow_model.id). \
+            filter(TaskEventModel.process_instance_id == process_instance_model.id). \
             filter(TaskEventModel.action == TaskAction.ASSIGNMENT.value).delete()
 
         if clear_data:
            # Clear out data in previous task events
            task_events = db.session.query(TaskEventModel). \
-                filter(TaskEventModel.workflow_id == workflow_model.id).all()
+                filter(TaskEventModel.process_instance_id == process_instance_model.id).all()
            for task_event in task_events:
                task_event.form_data = {}
                db.session.add(task_event)
            # Remove any uploaded files.
-            files = FileModel.query.filter(FileModel.workflow_id == workflow_model.id).all()
+
+            # TODO: grab UserFileService
+            # files = FileModel.query.filter(FileModel.process_instance_id == process_instance_model.id).all()
+            # for file in files:
+            #     UserFileService().delete_file(file.id)
         db.session.commit()
 
     @staticmethod
-    def __get_bpmn_workflow(workflow_model: WorkflowModel, spec: WorkflowSpec = None, validate_only=False):
-        """__get_bpmn_workflow."""
-        if workflow_model.bpmn_workflow_json:
-            version = WorkflowProcessor._serializer.get_version(workflow_model.bpmn_workflow_json)
-            if(version == WorkflowProcessor.SERIALIZER_VERSION):
-                bpmn_workflow = WorkflowProcessor._serializer.deserialize_json(workflow_model.bpmn_workflow_json)
+    def __get_bpmn_process_instance(process_instance_model: ProcessInstanceModel, spec: WorkflowSpec = None, validate_only=False):
+        """__get_bpmn_process_instance."""
+        if process_instance_model.bpmn_json:
+            version = ProcessInstanceProcessor._serializer.get_version(process_instance_model.bpmn_json)
+            if(version == ProcessInstanceProcessor.SERIALIZER_VERSION):
+                bpmn_process_instance = ProcessInstanceProcessor._serializer.deserialize_json(process_instance_model.bpmn_json)
             else:
-                bpmn_workflow = WorkflowProcessor.\
-                    _old_serializer.deserialize_workflow(workflow_model.bpmn_workflow_json,
-                                                         workflow_spec=spec)
-                bpmn_workflow.script_engine = WorkflowProcessor._script_engine
+                bpmn_process_instance = ProcessInstanceProcessor.\
+                    _old_serializer.deserialize_process_instance(process_instance_model.bpmn_json,
+                                                                 process_model=spec)
+                bpmn_process_instance.script_engine = ProcessInstanceProcessor._script_engine
         else:
-            bpmn_workflow = BpmnWorkflow(spec, script_engine=WorkflowProcessor._script_engine)
-            bpmn_workflow.data[WorkflowProcessor.STUDY_ID_KEY] = workflow_model.study_id
-            bpmn_workflow.data[WorkflowProcessor.VALIDATION_PROCESS_KEY] = validate_only
-        return bpmn_workflow
+            bpmn_process_instance = BpmnWorkflow(spec, script_engine=ProcessInstanceProcessor._script_engine)
+            bpmn_process_instance.data[ProcessInstanceProcessor.VALIDATION_PROCESS_KEY] = validate_only
+        return bpmn_process_instance
 
     def save(self):
         """Saves the current state of this processor to the database."""
-        self.workflow_model.bpmn_workflow_json = self.serialize()
+        self.process_instance_model.bpmn_json = self.serialize()
         complete_states = [TaskState.CANCELLED, TaskState.COMPLETED]
         tasks = list(self.get_all_user_tasks())
-        self.workflow_model.status = self.get_status()
-        self.workflow_model.total_tasks = len(tasks)
-        self.workflow_model.completed_tasks = sum(1 for t in tasks if t.state in complete_states)
-        self.workflow_model.last_updated = datetime.utcnow()
-        db.session.add(self.workflow_model)
+        self.process_instance_model.status = self.get_status()
+        self.process_instance_model.total_tasks = len(tasks)
+        self.process_instance_model.completed_tasks = sum(1 for t in tasks if t.state in complete_states)
+        self.process_instance_model.last_updated = datetime.utcnow()
+        db.session.add(self.process_instance_model)
         db.session.commit()
 
     @staticmethod
-    def run_master_spec(spec_model, study):
-        """Executes a BPMN specification for the given study, without recording any information to the database.
+    def run_master_spec(process_model):
+        """Executes a BPMN specification for the given process_model, without recording any information to the database.
 
         Useful for running the master specification, which should not persist.
         """
-        spec_files = SpecFileService().get_files(spec_model, include_libraries=True)
-        spec = WorkflowProcessor.get_spec(spec_files, spec_model)
+        spec_files = SpecFileService().get_files(process_model, include_libraries=True)
+        spec = ProcessInstanceProcessor.get_spec(spec_files, process_model)
         try:
-            bpmn_workflow = BpmnWorkflow(spec, script_engine=WorkflowProcessor._script_engine)
-            bpmn_workflow.data[WorkflowProcessor.STUDY_ID_KEY] = study.id
-            bpmn_workflow.data[WorkflowProcessor.VALIDATION_PROCESS_KEY] = False
-            WorkflowProcessor.add_user_info_to_workflow(bpmn_workflow)
-            bpmn_workflow.do_engine_steps()
+            bpmn_process_instance = BpmnWorkflow(spec, script_engine=ProcessInstanceProcessor._script_engine)
+            bpmn_process_instance.data[ProcessInstanceProcessor.VALIDATION_PROCESS_KEY] = False
+            ProcessInstanceProcessor.add_user_info_to_process_instance(bpmn_process_instance)
+            bpmn_process_instance.do_engine_steps()
         except WorkflowException as we:
             raise ApiError.from_task_spec("error_running_master_spec", str(we), we.sender) from we
 
-        if not bpmn_workflow.is_completed():
+        if not bpmn_process_instance.is_completed():
             raise ApiError("master_spec_not_automatic",
                            "The master spec should only contain fully automated tasks, it failed to complete.")
 
-        return bpmn_workflow.last_task.data
+        return bpmn_process_instance.last_task.data
 
     @staticmethod
     def get_parser():
@@ -283,25 +281,25 @@ class WorkflowProcessor(object):
         return parser
 
     @staticmethod
-    def get_spec(files: List[File], workflow_spec_info: WorkflowSpecInfo):
-        """Returns a SpiffWorkflow specification for the given workflow spec, using the files provided."""
-        parser = WorkflowProcessor.get_parser()
+    def get_spec(files: List[File], process_model_info: ProcessModelInfo):
+        """Returns a SpiffWorkflow specification for the given process_instance spec, using the files provided."""
+        parser = ProcessInstanceProcessor.get_parser()
 
         for file in files:
-            data = SpecFileService.get_data(workflow_spec_info, file.name)
+            data = SpecFileService.get_data(process_model_info, file.name)
             if file.type == FileType.bpmn.value:
                 bpmn: etree.Element = etree.fromstring(data)
                 parser.add_bpmn_xml(bpmn, filename=file.name)
             elif file.type == FileType.dmn.value:
                 dmn: etree.Element = etree.fromstring(data)
                 parser.add_dmn_xml(dmn, filename=file.name)
-        if workflow_spec_info.primary_process_id is None or workflow_spec_info.primary_process_id == "":
+        if process_model_info.primary_process_id is None or process_model_info.primary_process_id == "":
             raise (ApiError(code="no_primary_bpmn_error",
-                            message="There is no primary BPMN model defined for workflow %s" % workflow_spec_info.id))
+                            message="There is no primary BPMN model defined for process_instance %s" % process_model_info.id))
         try:
-            spec = parser.get_spec(workflow_spec_info.primary_process_id)
+            spec = parser.get_spec(process_model_info.primary_process_id)
         except ValidationException as ve:
-            raise ApiError(code="workflow_validation_error",
+            raise ApiError(code="process_instance_validation_error",
                            message="Failed to parse the Workflow Specification. "
                                    + "Error is '%s.'" % str(ve),
                            file_name=ve.filename,
@@ -310,58 +308,58 @@ class WorkflowProcessor(object):
         return spec
 
     @staticmethod
-    def status_of(bpmn_workflow):
+    def status_of(bpmn_process_instance):
         """Status_of."""
-        if bpmn_workflow.is_completed():
-            return WorkflowStatus.complete
-        user_tasks = bpmn_workflow.get_ready_user_tasks()
-        waiting_tasks = bpmn_workflow.get_tasks(TaskState.WAITING)
+        if bpmn_process_instance.is_completed():
+            return ProcessInstanceStatus.complete
+        user_tasks = bpmn_process_instance.get_ready_user_tasks()
+        waiting_tasks = bpmn_process_instance.get_tasks(TaskState.WAITING)
         if len(waiting_tasks) > 0:
-            return WorkflowStatus.waiting
+            return ProcessInstanceStatus.waiting
         if len(user_tasks) > 0:
-            return WorkflowStatus.user_input_required
+            return ProcessInstanceStatus.user_input_required
         else:
-            return WorkflowStatus.waiting
+            return ProcessInstanceStatus.waiting
 
     def get_status(self):
         """Get_status."""
-        return self.status_of(self.bpmn_workflow)
+        return self.status_of(self.bpmn_process_instance)
 
     def do_engine_steps(self, exit_at=None):
         """Do_engine_steps."""
         try:
-            self.bpmn_workflow.refresh_waiting_tasks()
-            self.bpmn_workflow.do_engine_steps(exit_at=exit_at)
+            self.bpmn_process_instance.refresh_waiting_tasks()
+            self.bpmn_process_instance.do_engine_steps(exit_at=exit_at)
         except WorkflowTaskExecException as we:
             raise ApiError.from_workflow_exception("task_error", str(we), we) from we
 
     def cancel_notify(self):
         """Cancel_notify."""
-        self.__cancel_notify(self.bpmn_workflow)
+        self.__cancel_notify(self.bpmn_process_instance)
 
     @staticmethod
-    def __cancel_notify(bpmn_workflow):
+    def __cancel_notify(bpmn_process_instance):
         """__cancel_notify."""
         try:
-            # A little hackly, but make the bpmn_workflow catch a cancel event.
-            bpmn_workflow.signal('cancel')  # generate a cancel signal.
-            bpmn_workflow.catch(CancelEventDefinition())
-            bpmn_workflow.do_engine_steps()
+            # A little hackly, but make the bpmn_process_instance catch a cancel event.
+            bpmn_process_instance.signal('cancel')  # generate a cancel signal.
+            bpmn_process_instance.catch(CancelEventDefinition())
+            bpmn_process_instance.do_engine_steps()
         except WorkflowTaskExecException as we:
             raise ApiError.from_workflow_exception("task_error", str(we), we) from we
 
     def serialize(self):
         """Serialize."""
-        return self._serializer.serialize_json(self.bpmn_workflow)
+        return self._serializer.serialize_json(self.bpmn_process_instance)
 
     def next_user_tasks(self):
         """Next_user_tasks."""
-        return self.bpmn_workflow.get_ready_user_tasks()
+        return self.bpmn_process_instance.get_ready_user_tasks()
 
     def next_task(self):
         """Returns the next task that should be completed even if there are parallel tasks and multiple options are available.
 
-        If the workflow is complete
+        If the process_instance is complete
         it will return the final end task.
         """
         # If the whole blessed mess is done, return the end_event task in the tree
@ -369,10 +367,10 @@ class WorkflowProcessor(object):
|
|||
# what we really want is the LAST EndEvent
|
||||
|
||||
endtasks = []
|
||||
if self.bpmn_workflow.is_completed():
|
||||
for task in SpiffTask.Iterator(self.bpmn_workflow.task_tree, TaskState.ANY_MASK):
|
||||
# Assure that we find the end event for this workflow, and not for any sub-workflows.
|
||||
if isinstance(task.task_spec, EndEvent) and task.workflow == self.bpmn_workflow:
|
||||
if self.bpmn_process_instance.is_completed():
|
||||
for task in SpiffTask.Iterator(self.bpmn_process_instance.task_tree, TaskState.ANY_MASK):
|
||||
# Assure that we find the end event for this process_instance, and not for any sub-process_instances.
|
||||
if isinstance(task.task_spec, EndEvent) and task.process_instance == self.bpmn_process_instance:
|
||||
endtasks.append(task)
|
||||
return endtasks[-1]
|
||||
|
||||
|
@ -381,11 +379,11 @@ class WorkflowProcessor(object):
|
|||
# a parallel gateway with multiple tasks, so prefer ones that share a parent.
|
||||
|
||||
# Get a list of all ready tasks
|
||||
ready_tasks = self.bpmn_workflow.get_tasks(TaskState.READY)
|
||||
ready_tasks = self.bpmn_process_instance.get_tasks(TaskState.READY)
|
||||
|
||||
if len(ready_tasks) == 0:
|
||||
# If no ready tasks exist, check for a waiting task.
|
||||
waiting_tasks = self.bpmn_workflow.get_tasks(TaskState.WAITING)
|
||||
waiting_tasks = self.bpmn_process_instance.get_tasks(TaskState.WAITING)
|
||||
if len(waiting_tasks) > 0:
|
||||
return waiting_tasks[0]
|
||||
else:
|
||||
|
@@ -405,7 +403,7 @@ class WorkflowProcessor(object):
                 if task._is_descendant_of(last_user_task):
                     return task
             for task in ready_tasks:
-                if self.bpmn_workflow.last_task and task.parent == last_user_task.parent:
+                if self.bpmn_process_instance.last_task and task.parent == last_user_task.parent:
                     return task

             return ready_tasks[0]
@@ -413,16 +411,16 @@ class WorkflowProcessor(object):
         # If there are no ready tasks, but the thing isn't complete yet, find the first non-complete task
         # and return that
         next_task = None
-        for task in SpiffTask.Iterator(self.bpmn_workflow.task_tree, TaskState.NOT_FINISHED_MASK):
+        for task in SpiffTask.Iterator(self.bpmn_process_instance.task_tree, TaskState.NOT_FINISHED_MASK):
             next_task = task
         return next_task

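Pieced together across the hunks above, next_task applies a fixed preference order. A condensed, illustrative restatement (the completed-instance EndEvent branch is omitted, and the real method interleaves these checks with the last-user-task bookkeeping shown in the diff; SpiffTask and TaskState as imported by this module):

    def pick_next_task(instance, last_user_task):
        ready = instance.get_tasks(TaskState.READY)
        if not ready:
            waiting = instance.get_tasks(TaskState.WAITING)
            if waiting:
                return waiting[0]    # nothing ready: surface the first waiting task
        for task in ready:
            if task._is_descendant_of(last_user_task):
                return task          # prefer work downstream of the last user task
        for task in ready:
            if instance.last_task and task.parent == last_user_task.parent:
                return task          # then a parallel sibling of it
        if ready:
            return ready[0]          # then any ready task
        # Otherwise walk to the last not-finished task, mirroring the final loop above.
        next_task = None
        for task in SpiffTask.Iterator(instance.task_tree, TaskState.NOT_FINISHED_MASK):
            next_task = task
        return next_task
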
     def completed_user_tasks(self):
         """Completed_user_tasks."""
-        completed_user_tasks = self.bpmn_workflow.get_tasks(TaskState.COMPLETED)
+        completed_user_tasks = self.bpmn_process_instance.get_tasks(TaskState.COMPLETED)
         completed_user_tasks.reverse()
         completed_user_tasks = list(
-            filter(lambda task: not self.bpmn_workflow._is_engine_task(task.task_spec), completed_user_tasks))
+            filter(lambda task: not self.bpmn_process_instance._is_engine_task(task.task_spec), completed_user_tasks))
         return completed_user_tasks

     def previous_task(self):
@@ -431,39 +429,32 @@ class WorkflowProcessor(object):

     def complete_task(self, task):
         """Complete_task."""
-        self.bpmn_workflow.complete_task_from_id(task.id)
+        self.bpmn_process_instance.complete_task_from_id(task.id)

     def get_data(self):
         """Get_data."""
-        return self.bpmn_workflow.data
+        return self.bpmn_process_instance.data

-    def get_workflow_id(self):
-        """Get_workflow_id."""
-        return self.workflow_model.id
+    def get_process_instance_id(self):
+        """Get_process_instance_id."""
+        return self.process_instance_model.id

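complete_task, get_data, and the ready-task accessors form the processor's engine-facing surface. A typical drive loop a caller might run, using only methods shown in this file (the form data is illustrative; a real caller would likely run engine steps between completions):

    def answer_everything(processor):
        # Complete each READY user task with canned data, then read the results.
        for task in processor.get_ready_user_tasks():
            task.data['name'] = 'World'    # hypothetical form field
            processor.complete_task(task)
        return processor.get_data()
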
     @staticmethod
-    def find_top_level_workflow(task):
-        """Find_top_level_workflow."""
-        # Find the top level workflow, as this is where the study id etc... are stored.
-        workflow = task.workflow
-        while WorkflowProcessor.STUDY_ID_KEY not in workflow.data:
-            if workflow.outer_workflow != workflow:
-                workflow = workflow.outer_workflow
-            else:
-                break
-        return workflow
-
-    def get_study_id(self):
-        """Get_study_id."""
-        return self.bpmn_workflow.data[self.STUDY_ID_KEY]
+    def find_top_level_process_instance(task):
+        """Find_top_level_process_instance."""
+        # Find the top level process_instance, as this is where the parent id etc... are stored.
+        process_instance = task.process_instance
+        while process_instance.outer_process_instance != process_instance:
+            process_instance = process_instance.outer_process_instance
+        return process_instance

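The new helper climbs from a task nested in sub-processes to the outermost instance. A minimal illustration using names from this diff (the sub-process nesting, and the task exposing a process_instance chain at runtime, are assumptions for the example):

    task = processor.get_ready_user_tasks()[0]                      # possibly inside a sub-process
    top = WorkflowProcessor.find_top_level_process_instance(task)
    assert top is processor.bpmn_process_instance                   # the walk ends at the outermost instance
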
     def get_ready_user_tasks(self):
         """Get_ready_user_tasks."""
-        return self.bpmn_workflow.get_ready_user_tasks()
+        return self.bpmn_process_instance.get_ready_user_tasks()

     def get_current_user_tasks(self):
         """Return a list of all user tasks that are READY or COMPLETE and are parallel to the READY Task."""
-        ready_tasks = self.bpmn_workflow.get_ready_user_tasks()
+        ready_tasks = self.bpmn_process_instance.get_ready_user_tasks()
         additional_tasks = []
         if len(ready_tasks) > 0:
             for child in ready_tasks[0].parent.children:
@@ -473,30 +464,30 @@ class WorkflowProcessor(object):

     def get_all_user_tasks(self):
         """Get_all_user_tasks."""
-        all_tasks = self.bpmn_workflow.get_tasks(TaskState.ANY_MASK)
-        return [t for t in all_tasks if not self.bpmn_workflow._is_engine_task(t.task_spec)]
+        all_tasks = self.bpmn_process_instance.get_tasks(TaskState.ANY_MASK)
+        return [t for t in all_tasks if not self.bpmn_process_instance._is_engine_task(t.task_spec)]

     def get_all_completed_tasks(self):
         """Get_all_completed_tasks."""
-        all_tasks = self.bpmn_workflow.get_tasks(TaskState.ANY_MASK)
+        all_tasks = self.bpmn_process_instance.get_tasks(TaskState.ANY_MASK)
         return [t for t in all_tasks
-                if not self.bpmn_workflow._is_engine_task(t.task_spec)
+                if not self.bpmn_process_instance._is_engine_task(t.task_spec)
                 and t.state in [TaskState.COMPLETED, TaskState.CANCELLED]]

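These accessors filter on SpiffWorkflow's TaskState bitmasks, which compose with bitwise OR. The same filtering done by hand (the import path is an assumption that differs between SpiffWorkflow versions):

    from SpiffWorkflow.task import TaskState  # assumed import path

    finished_mask = TaskState.COMPLETED | TaskState.CANCELLED
    done = [t for t in processor.bpmn_process_instance.get_tasks(TaskState.ANY_MASK)
            if t.state & finished_mask]
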
     def get_nav_item(self, task):
         """Get_nav_item."""
-        for nav_item in self.bpmn_workflow.get_nav_list():
+        for nav_item in self.bpmn_process_instance.get_nav_list():
             if nav_item['task_id'] == task.id:
                 return nav_item

     def find_spec_and_field(self, spec_name, field_id):
-        """Tracks down a form field by name in the workflow spec(s), Returns a tuple of the task, and form."""
-        workflows = [self.bpmn_workflow]
-        for task in self.bpmn_workflow.get_ready_user_tasks():
-            if task.workflow not in workflows:
-                workflows.append(task.workflow)
-        for workflow in workflows:
-            for spec in workflow.spec.task_specs.values():
+        """Tracks down a form field by name in the process_instance spec(s), Returns a tuple of the task, and form."""
+        process_instances = [self.bpmn_process_instance]
+        for task in self.bpmn_process_instance.get_ready_user_tasks():
+            if task.process_instance not in process_instances:
+                process_instances.append(task.process_instance)
+        for process_instance in process_instances:
+            for spec in process_instance.spec.task_specs.values():
                 if spec.name == spec_name:
                     if not hasattr(spec, "form"):
                         raise ApiError("invalid_spec",
@@ -510,4 +501,4 @@ class WorkflowProcessor(object):
                              f"The task '{spec_name}' has no field named '{field_id}'")

         raise ApiError("invalid_spec",
-                       f"Unable to find a task in the workflow called '{spec_name}'")
+                       f"Unable to find a task in the process_instance called '{spec_name}'")

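find_spec_and_field backs form-field validation in the API layer. A usage sketch, with ids taken from the hello_world model added later in this commit (the tuple unpacking follows the docstring's "(task, form)" promise; the return statement itself falls outside the visible hunks):

    task, form = processor.find_spec_and_field('Task_GetName', 'name')
    # Raises ApiError("invalid_spec", ...) if the task or the field does not exist.
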
@@ -1,11 +1,9 @@
 """User_service."""
 from flask import g

-import crc.api.user
-from crc import session
-from crc.api.common import ApiError
-from crc.services.ldap_service import LdapService
-from crc.models.user import UserModel, AdminSessionModel
+from flask_bpmn.models.db import db
+from flask_bpmn.api.api_error import ApiError
+from spiff_workflow_webapp.models.user import UserModel, AdminSessionModel


 class UserService(object):
@@ -73,20 +71,15 @@ class UserService(object):

         if UserService.is_different_user(uid):
             # Impersonate the user if the given uid is valid.

-            # If the user is not in the User table, add them to it
-            ldap_info = LdapService().user_info(uid)
-            crc.api.user._upsert_user(ldap_info)
-
-            impersonate_user = session.query(UserModel).filter(UserModel.uid == uid).first()
+            impersonate_user = db.session.query(UserModel).filter(UserModel.uid == uid).first()

             if impersonate_user is not None:
                 g.impersonate_user = impersonate_user

                 # Store the uid and user session token.
-                session.query(AdminSessionModel).filter(AdminSessionModel.token == g.token).delete()
-                session.add(AdminSessionModel(token=g.token, admin_impersonate_uid=uid))
-                session.commit()
+                db.session.query(AdminSessionModel).filter(AdminSessionModel.token == g.token).delete()
+                db.session.add(AdminSessionModel(token=g.token, admin_impersonate_uid=uid))
+                db.session.commit()
             else:
                 raise ApiError("invalid_uid", "The uid provided is not valid.")

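The swap from crc's scoped session to flask_bpmn's db.session is mechanical, but note that the delete and the add above ride in a single transaction until commit. A defensive variant sketched for illustration (the rollback handling is an addition for the example, not part of this commit):

    try:
        db.session.query(AdminSessionModel).filter(AdminSessionModel.token == g.token).delete()
        db.session.add(AdminSessionModel(token=g.token, admin_impersonate_uid=uid))
        db.session.commit()      # the delete and the insert land together
    except Exception:
        db.session.rollback()    # leave no half-applied admin session
        raise
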
@@ -102,13 +95,15 @@ class UserService(object):

         admin_session: AdminSessionModel = UserService.get_admin_session()
         if admin_session:
-            session.delete(admin_session)
-            session.commit()
+            db.session.delete(admin_session)
+            db.session.commit()

     @staticmethod
     def in_list(uids, allow_admin_impersonate=False):
-        """Returns true if the current user's id is in the given list of ids. False if there
-        is no user, or the user is not in the list."""
+        """Returns true if the current user's id is in the given list of ids.
+
+        False if there is no user, or the user is not in the list.
+        """
         if UserService.has_user():  # If someone is logged in, lock tasks that don't belong to them.
             user = UserService.current_user(allow_admin_impersonate)
             if user.uid in uids:
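in_list is what callers use to lock tasks to their owners. A usage sketch (the uid list is illustrative):

    # Allow the task through only if the current user, or an admin
    # impersonating one, is among the listed owners.
    if not UserService.in_list(['dhf8r', 'lb3dp'], allow_admin_impersonate=True):
        raise ApiError("permission_denied", "This task belongs to someone else.")
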
@@ -119,7 +114,7 @@ class UserService(object):
     def get_admin_session() -> AdminSessionModel:
         """Get_admin_session."""
         if UserService.user_is_admin():
-            return session.query(AdminSessionModel).filter(AdminSessionModel.token == g.token).first()
+            return db.session.query(AdminSessionModel).filter(AdminSessionModel.token == g.token).first()
         else:
             raise ApiError("unauthorized", "You do not have permissions to do this.", status_code=403)

@@ -130,6 +125,6 @@ class UserService(object):
         admin_session = UserService.get_admin_session()

         if admin_session is not None:
-            return session.query(UserModel).filter(UserModel.uid == admin_session.admin_impersonate_uid).first()
+            return db.session.query(UserModel).filter(UserModel.uid == admin_session.admin_impersonate_uid).first()
         else:
             raise ApiError("unauthorized", "You do not have permissions to do this.", status_code=403)

@@ -0,0 +1,58 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" id="Definitions_0ixyfs0" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.5.0">
+  <bpmn:process id="Process_HelloWorld" name="Hello World Process" isExecutable="true">
+    <bpmn:documentation>This workflow asks for a name and says hello</bpmn:documentation>
+    <bpmn:startEvent id="StartEvent_1">
+      <bpmn:outgoing>SequenceFlow_0qyd2b7</bpmn:outgoing>
+    </bpmn:startEvent>
+    <bpmn:sequenceFlow id="SequenceFlow_0qyd2b7" sourceRef="StartEvent_1" targetRef="Task_GetName" />
+    <bpmn:userTask id="Task_GetName" name="Get Name" camunda:formKey="Name">
+      <bpmn:documentation>Hello</bpmn:documentation>
+      <bpmn:extensionElements>
+        <camunda:formData>
+          <camunda:formField id="name" label="'Name'" type="string" defaultValue="World" />
+        </camunda:formData>
+      </bpmn:extensionElements>
+      <bpmn:incoming>SequenceFlow_0qyd2b7</bpmn:incoming>
+      <bpmn:outgoing>SequenceFlow_1h46b40</bpmn:outgoing>
+    </bpmn:userTask>
+    <bpmn:sequenceFlow id="SequenceFlow_1h46b40" sourceRef="Task_GetName" targetRef="Task_SayHello" />
+    <bpmn:manualTask id="Task_SayHello" name="Say Hello">
+      <bpmn:documentation>Hello {{name}}</bpmn:documentation>
+      <bpmn:incoming>SequenceFlow_1h46b40</bpmn:incoming>
+      <bpmn:outgoing>SequenceFlow_0lqrc6e</bpmn:outgoing>
+    </bpmn:manualTask>
+    <bpmn:endEvent id="EndEvent_1l03lqw">
+      <bpmn:incoming>SequenceFlow_0lqrc6e</bpmn:incoming>
+    </bpmn:endEvent>
+    <bpmn:sequenceFlow id="SequenceFlow_0lqrc6e" sourceRef="Task_SayHello" targetRef="EndEvent_1l03lqw" />
+  </bpmn:process>
+  <bpmndi:BPMNDiagram id="BPMNDiagram_1">
+    <bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_HelloWorld">
+      <bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
+        <dc:Bounds x="179" y="99" width="36" height="36" />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNEdge id="SequenceFlow_0qyd2b7_di" bpmnElement="SequenceFlow_0qyd2b7">
+        <di:waypoint x="215" y="117" />
+        <di:waypoint x="270" y="117" />
+      </bpmndi:BPMNEdge>
+      <bpmndi:BPMNShape id="UserTask_0fbucz7_di" bpmnElement="Task_GetName">
+        <dc:Bounds x="270" y="77" width="100" height="80" />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNEdge id="SequenceFlow_1h46b40_di" bpmnElement="SequenceFlow_1h46b40">
+        <di:waypoint x="370" y="117" />
+        <di:waypoint x="430" y="117" />
+      </bpmndi:BPMNEdge>
+      <bpmndi:BPMNShape id="ManualTask_1tia2zr_di" bpmnElement="Task_SayHello">
+        <dc:Bounds x="430" y="77" width="100" height="80" />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNShape id="EndEvent_1l03lqw_di" bpmnElement="EndEvent_1l03lqw">
+        <dc:Bounds x="592" y="99" width="36" height="36" />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNEdge id="SequenceFlow_0lqrc6e_di" bpmnElement="SequenceFlow_0lqrc6e">
+        <di:waypoint x="530" y="117" />
+        <di:waypoint x="592" y="117" />
+      </bpmndi:BPMNEdge>
+    </bpmndi:BPMNPlane>
+  </bpmndi:BPMNDiagram>
+</bpmn:definitions>

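This hello_world spec is what the new test drives end to end through the API. For poking at it directly, one way to load and step through it with SpiffWorkflow, sketched with import paths that are assumptions and shift between SpiffWorkflow releases:

    from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
    from SpiffWorkflow.camunda.parser.CamundaParser import CamundaParser  # understands camunda:formData

    parser = CamundaParser()
    parser.add_bpmn_file('hello_world.bpmn')
    spec = parser.get_spec('Process_HelloWorld')

    instance = BpmnWorkflow(spec)
    instance.do_engine_steps()                     # runs up to the Get Name user task
    task = instance.get_ready_user_tasks()[0]
    task.data['name'] = 'World'                    # answer the form field
    instance.complete_task_from_id(task.id)
    instance.do_engine_steps()                     # manual task and end event remain
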
@@ -1,21 +1,24 @@
 """User."""
 import os
 from typing import Any
+from flask_bpmn.models.db import db


 from spiff_workflow_webapp.models.process_group import ProcessGroup
+from spiff_workflow_webapp.models.user import UserModel
 from spiff_workflow_webapp.services.process_model_service import ProcessModelService

 from tests.spiff_workflow_webapp.helpers.example_data import ExampleDataLoader


-# def find_or_create_user(username: str = "test_user1") -> Any:
-#     user = UserModel.query.filter_by(username=username).first()
-#     if user is None:
-#         user = UserModel(username=username)
-#         db.session.add(user)
-#         db.session.commit()
-#
-#     return user
+def find_or_create_user(username: str = "test_user1") -> Any:
+    user = UserModel.query.filter_by(username=username).first()
+    if user is None:
+        user = UserModel(username=username)
+        db.session.add(user)
+        db.session.commit()
+
+    return user
 #
 #
 # def find_or_create_process_group(name: str = "test_group1") -> Any:

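Un-commenting find_or_create_user gives tests a committed user row to hang requests on, and the query-before-insert makes it idempotent:

    user = find_or_create_user()     # creates and commits "test_user1" on first call
    again = find_or_create_user()    # returns the same row, no new insert
    assert user.id == again.id
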
@@ -13,7 +13,7 @@ from spiff_workflow_webapp.models.process_group import ProcessGroup
 from spiff_workflow_webapp.services.process_model_service import ProcessModelService
 from spiff_workflow_webapp.models.file import FileType

-from tests.spiff_workflow_webapp.helpers.test_data import load_test_spec
+from tests.spiff_workflow_webapp.helpers.test_data import load_test_spec, find_or_create_user


 @pytest.fixture()
@@ -46,6 +46,18 @@ def test_add_new_process_model(app, client: FlaskClient, with_bpmn_file_cleanup)
     #


+def test_get_workflow_from_workflow_spec(app, client: FlaskClient, with_bpmn_file_cleanup):
+    # create_process_model(app, client)
+    # create_spec_file(app, client)
+
+    user = find_or_create_user()
+    spec = load_test_spec(app, 'hello_world')
+    rv = client.post(f'/v1.0/workflow-specification/{spec.id}')
+    assert rv.status_code == 200
+    assert('hello_world' == rv.json['workflow_spec_id'])
+    assert('Task_GetName' == rv.json['next_task']['name'])
+
+
 def create_process_model(app, client: FlaskClient):
     process_model_service = ProcessModelService()
     assert(0 == len(process_model_service.get_specs()))