Merge commit '00dc4e149d8396e54d0bbfa77915a57ab8aa972e'

commit bc3f0d9e20
Author: burnettk
Date:   2022-10-26 13:42:08 -04:00
4 changed files with 40 additions and 17 deletions

File 1 of 4: permissions configuration (YAML)

@@ -20,7 +20,7 @@ groups:
       natalia,
     ]
 
-  finance:
+  Finance Team:
     users: [finance_user1]
 
 permissions:
@@ -30,11 +30,24 @@ permissions:
     allowed_permissions: [create, read, update, delete, list, instantiate]
     uri: /*
 
-  finance-admin:
-    groups: [finance]
+  tasks-crud:
+    groups: [everybody]
     users: []
     allowed_permissions: [create, read, update, delete]
-    uri: /v1.0/process-groups/execute-procure-to-pay/*
+    uri: /v1.0/tasks/*
 
+  # TODO: all uris should really have the same structure
+  finance-admin-group:
+    groups: ["Finance Team"]
+    users: []
+    allowed_permissions: [create, read, update, delete]
+    uri: /v1.0/process-groups/finance/*
+
+  finance-admin-model:
+    groups: ["Finance Team"]
+    users: []
+    allowed_permissions: [create, read, update, delete]
+    uri: /v1.0/process-models/finance/*
+
   read-all:
     groups: [finance, admin]
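
Note on the permissions change: the single finance-admin rule, scoped to one hard-coded process group, is replaced by a tasks-crud rule granting everybody CRUD under /v1.0/tasks/*, plus two Finance Team rules covering the finance process group and its process models. A minimal sketch of how a trailing-wildcard uri like these could be matched against a request path (PermissionEntry and uri_matches are hypothetical names, and prefix matching is an assumed semantics, not the actual spiffworkflow-backend authorization code):

from dataclasses import dataclass, field


@dataclass
class PermissionEntry:  # hypothetical stand-in for one yaml entry above
    groups: list[str]
    allowed_permissions: list[str]
    uri: str
    users: list[str] = field(default_factory=list)


def uri_matches(permission_uri: str, target_uri: str) -> bool:
    # Treat a trailing "/*" as "this path and everything under it".
    if permission_uri.endswith("/*"):
        prefix = permission_uri[:-2]
        return target_uri == prefix or target_uri.startswith(prefix + "/")
    return permission_uri == target_uri


entry = PermissionEntry(
    groups=["Finance Team"],
    allowed_permissions=["create", "read", "update", "delete"],
    uri="/v1.0/process-models/finance/*",
)
assert uri_matches(entry.uri, "/v1.0/process-models/finance/budget")
assert not uri_matches(entry.uri, "/v1.0/process-models/hr/payroll")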

File 2 of 4: ActiveTaskModel (Python)

@@ -46,12 +46,12 @@ class ActiveTaskModel(SpiffworkflowBaseDBModel):
     updated_at_in_seconds: int = db.Column(db.Integer)
     created_at_in_seconds: int = db.Column(db.Integer)
-    task_id = db.Column(db.String(50))
-    task_name = db.Column(db.String(50))
-    task_title = db.Column(db.String(50))
-    task_type = db.Column(db.String(50))
-    task_status = db.Column(db.String(50))
-    process_model_display_name = db.Column(db.String(255))
+    task_id: str = db.Column(db.String(50))
+    task_name: str = db.Column(db.String(50))
+    task_title: str = db.Column(db.String(50))
+    task_type: str = db.Column(db.String(50))
+    task_status: str = db.Column(db.String(50))
+    process_model_display_name: str = db.Column(db.String(255))
     active_task_users = relationship("ActiveTaskUserModel", cascade="delete")
     potential_owners = relationship(  # type: ignore
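
Note on the annotations: the dataclasses.asdict call added in the next file suggests ActiveTaskModel is also declared as a dataclass, and if so the new ": str" annotations are load-bearing rather than cosmetic: dataclasses treats only annotated class attributes as fields, so the previously unannotated columns would be silently absent from asdict() output. A self-contained illustration:

import dataclasses


@dataclasses.dataclass
class Annotated:
    task_id: str = "abc"       # annotated -> becomes a dataclass field
    task_name = "needs input"  # no annotation -> plain class attribute, skipped


print(dataclasses.asdict(Annotated()))  # {'task_id': 'abc'} -- task_name is absent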

File 3 of 4: process API blueprint (Python)

@@ -1,4 +1,5 @@
 """APIs for dealing with process groups, process models, and process instances."""
+import dataclasses
 import json
 import os
 import random
@@ -1352,6 +1353,13 @@ def get_spiff_task_from_process_instance(
     task_uuid = uuid.UUID(task_id)
     spiff_task = processor.bpmn_process_instance.get_task(task_uuid)
+    # FOR DEBUGGING: save this variable so we get it in sentry when something fails
+    active_task = ActiveTaskModel.query.filter_by(task_id=task_id).first()
+    if active_task:
+        task_json = dataclasses.asdict(active_task)
+        print(f"task_json: {task_json}")
+    ########
     if spiff_task is None:
         raise (
             ApiError(
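
Note on the debugging block: binding the serialized task to a local name works because Sentry's Python SDK records each stack frame's local variables when it captures an exception, so task_json rides along with the event even though nothing below reads it. A stdlib-only sketch of the same mechanism, pulling locals out of a traceback the way an error tracker does (Task and handler are hypothetical):

import dataclasses
import sys


@dataclasses.dataclass
class Task:  # hypothetical stand-in for ActiveTaskModel
    task_id: str


def handler(task: Task) -> None:
    task_json = dataclasses.asdict(task)  # bound locally, never used below
    raise RuntimeError("boom")


try:
    handler(Task(task_id="42"))
except RuntimeError:
    tb = sys.exc_info()[2]
    while tb.tb_next:  # walk to the innermost frame, as an error tracker does
        tb = tb.tb_next
    print(tb.tb_frame.f_locals["task_json"])  # {'task_id': '42'}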

File 4 of 4: DataSetupService (Python)

@@ -20,10 +20,12 @@ class DataSetupService:
         failing_process_models = []
         process_models = ProcessModelService().get_process_models()
         for process_model in process_models:
-            if process_model.primary_file_name:
+            process_model_files = SpecFileService.get_files(
+                process_model, extension_filter=".bpmn"
+            )
+            for process_model_file in process_model_files:
                 bpmn_xml_file_contents = SpecFileService.get_data(
-                    process_model, process_model.primary_file_name
+                    process_model, process_model_file.name
                 )
                 bad_files = [
                     "B.1.0.bpmn",
@@ -32,21 +34,21 @@
                     "C.6.0.bpmn",
                     "TC-5.1.bpmn",
                 ]
-                if process_model.primary_file_name in bad_files:
+                if process_model_file.name in bad_files:
                     continue
                 current_app.logger.debug(
-                    f"primary_file_name: {process_model.primary_file_name}"
+                    f"primary_file_name: {process_model_file.name}"
                 )
                 try:
                     SpecFileService.update_file(
                         process_model,
-                        process_model.primary_file_name,
+                        process_model_file.name,
                         bpmn_xml_file_contents,
                     )
                 except Exception as ex:
                     failing_process_models.append(
                         (
-                            f"{process_model.process_group_id}/{process_model.id}/{process_model.primary_file_name}",
+                            f"{process_model.process_group_id}/{process_model.id}/{process_model_file.name}",
                             str(ex),
                         )
                     )
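
Note on the service change: validation now round-trips every .bpmn file of every process model instead of only primary_file_name, and it accumulates (path, error) pairs rather than failing fast. A stripped-down sketch of that loop shape, with XML parsing standing in for SpecFileService.update_file (SpecFile, Model, and validate_all are stand-ins, not the real service API):

import xml.etree.ElementTree as ET
from dataclasses import dataclass


@dataclass
class SpecFile:  # stand-in for the objects SpecFileService.get_files returns
    name: str
    contents: bytes


@dataclass
class Model:  # stand-in for a process model with its files
    id: str
    files: list[SpecFile]


def validate_all(models: list[Model]) -> list[tuple[str, str]]:
    """Collect (path, error) pairs for every .bpmn file that fails to parse."""
    failures: list[tuple[str, str]] = []
    for model in models:
        for f in (f for f in model.files if f.name.endswith(".bpmn")):
            try:
                ET.fromstring(f.contents)  # stands in for update_file's re-save
            except ET.ParseError as ex:
                failures.append((f"{model.id}/{f.name}", str(ex)))
    return failures


models = [Model("finance/p2p", [SpecFile("a.bpmn", b"<ok/>"), SpecFile("b.bpmn", b"<bad")])]
print(validate_all(models))  # one failure reported for b.bpmn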