From 7249fa69e1584f0ab8f38027ff14389e42257a5a Mon Sep 17 00:00:00 2001
From: burnettk
Date: Wed, 6 Jul 2022 22:59:44 -0400
Subject: [PATCH] update import tickets script

---
 .../import-tickets/import-tickets.bpmn | 141 ++++++++++--------
 1 file changed, 82 insertions(+), 59 deletions(-)

diff --git a/sartography-admin/import-tickets/import-tickets.bpmn b/sartography-admin/import-tickets/import-tickets.bpmn
index 23a888e3..c46deb55 100644
--- a/sartography-admin/import-tickets/import-tickets.bpmn
+++ b/sartography-admin/import-tickets/import-tickets.bpmn
@@ -12,79 +12,102 @@
 Flow_0pvp5mz
 Flow_04qc4ur
-import os
-from flask_bpmn.models.db import db
-
-from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
-
-import csv
-
-from spiffworkflow_backend import create_app
-from spiffworkflow_backend.models.user import UserModel
-from spiffworkflow_backend.services.process_instance_processor import ProcessInstanceProcessor
-from spiffworkflow_backend.services.process_instance_service import ProcessInstanceService
-from flask import current_app
-
-from spiffworkflow_backend.services.process_model_service import ProcessModelService
+"""Import tickets, for use in script task."""

-process_model_identifier_ticket = "ticket"
-db.session.query(ProcessInstanceModel).filter(ProcessInstanceModel.process_model_identifier == process_model_identifier_ticket).delete()
-db.session.commit()
+def main():
+    """Use main to avoid global namespace."""
+    import csv

-"""Print process instance count."""
-process_instances = ProcessInstanceModel.query.filter_by(process_model_identifier=process_model_identifier_ticket).all()
-process_instance_count = len(process_instances)
-print(f"process_instance_count: {process_instance_count}")
+    from flask_bpmn.models.db import db

-process_model = ProcessModelService().get_process_model(process_model_identifier_ticket)
-columns_to_data_key_mappings = {"Month": "month", "MS": "milestone", "ID": "req_id", "Dev Days": "dev_days", "Feature": "feature", "Priority": "priority"}
-columns_to_header_index_mappings = {}
+    from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
+    from spiffworkflow_backend.models.user import UserModel
+    from spiffworkflow_backend.services.process_instance_processor import (
+        ProcessInstanceProcessor,
+    )
+    from spiffworkflow_backend.services.process_instance_service import (
+        ProcessInstanceService,
+    )

-user = UserModel.query.filter_by(username='test_user1').first()
+    process_model_identifier_ticket = "ticket"
+    db.session.query(ProcessInstanceModel).filter(
+        ProcessInstanceModel.process_model_identifier == process_model_identifier_ticket
+    ).delete()
+    db.session.commit()

-with open("tests/files/tickets.csv") as infile:
-    reader = csv.reader(infile, delimiter=",")
+    """Print process instance count."""
+    process_instances = ProcessInstanceModel.query.filter_by(
+        process_model_identifier=process_model_identifier_ticket
+    ).all()
+    process_instance_count = len(process_instances)
+    print(f"process_instance_count: {process_instance_count}")

-    # first row is garbage
-    next(reader)
+    columns_to_data_key_mappings = {
+        "Month": "month",
+        "MS": "milestone",
+        "ID": "req_id",
+        "Dev Days": "dev_days",
+        "Feature": "feature",
+        "Priority": "priority",
+    }
+    columns_to_header_index_mappings = {}

-    header = next(reader)
-    for column_name in columns_to_data_key_mappings:
-        columns_to_header_index_mappings[column_name] = header.index(column_name)
-    id_index = header.index("ID")
-    priority_index = header.index("Priority")
-    print(f"header: {header}")
-    for row in reader:
-        ticket_identifier = row[id_index]
-        priority = row[priority_index]
-        print(f"ticket_identifier: {ticket_identifier}")
-        print(f"priority: {priority}")
+    user = UserModel.query.filter_by(username="test_user1").first()

-        process_instance = ProcessInstanceService.create_process_instance(
-            process_model_identifier_ticket, user
-        )
-        processor = ProcessInstanceProcessor(process_instance)
+    with open("tests/files/tickets.csv") as infile:
+        reader = csv.reader(infile, delimiter=",")

-        processor.do_engine_steps()
-        # processor.save()
+        # first row is garbage
+        next(reader)

-        for column_name, desired_data_key in columns_to_data_key_mappings.items():
-            appropriate_index = columns_to_header_index_mappings[column_name]
-            print(f"appropriate_index: {appropriate_index}")
-            processor.bpmn_process_instance.data[desired_data_key] = row[appropriate_index]
+        header = next(reader)
+        for column_name in columns_to_data_key_mappings:
+            columns_to_header_index_mappings[column_name] = header.index(column_name)
+        id_index = header.index("ID")
+        priority_index = header.index("Priority")
+        month_index = header.index("Month")
+        print(f"header: {header}")
+        for row in reader:
+            ticket_identifier = row[id_index]
+            priority = row[priority_index]
+            month = row[month_index]
+            print(f"ticket_identifier: {ticket_identifier}")
+            print(f"priority: {priority}")
+            # if there is no month, who cares about it.
+            if month:
+                process_instance = ProcessInstanceService.create_process_instance(
+                    process_model_identifier=process_model_identifier_ticket, user=user, process_group_identifier='sartography-admin'
+                )
+                processor = ProcessInstanceProcessor(process_instance)

-        # you at least need a month, or else this row in the csv is considered garbage
-        month_value = processor.bpmn_process_instance.data["month"]
-        if month_value == "" or month_value is None:
-            db.delete(process_instance)
-            db.session.commit()
-            continue
+                processor.do_engine_steps()
+                # processor.save()

-        processor.save()
+                for column_name, desired_data_key in columns_to_data_key_mappings.items():
+                    appropriate_index = columns_to_header_index_mappings[column_name]
+                    print(f"appropriate_index: {appropriate_index}")
+                    processor.bpmn_process_instance.data[desired_data_key] = row[
+                        appropriate_index
+                    ]

-        process_instance_data = processor.get_data()
-        print(f"process_instance_data: {process_instance_data}")
+                # you at least need a month, or else this row in the csv is considered garbage
+                month_value = processor.bpmn_process_instance.data["month"]
+                if month_value == "" or month_value is None:
+                    db.delete(process_instance)
+                    db.session.commit()
+                    continue
+
+                processor.save()
+
+                process_instance_data = processor.get_data()
+                print(f"process_instance_data: {process_instance_data}")
+
+
+main()
+
+# to avoid serialization issues
+del main