Merge branch 'main' of github.com:sartography/spiff-arena
commit 336cd3a1c7

data_setup_service.py
@@ -1,7 +1,10 @@
import os

from flask import current_app
from spiffworkflow_backend.models.cache_generation import CacheGenerationModel
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.reference_cache import ReferenceCacheModel
from spiffworkflow_backend.services.file_system_service import FileSystemService
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.spec_file_service import SpecFileService
from sqlalchemy import insert
@@ -12,6 +15,15 @@ class DataSetupService:
    def run_setup(cls) -> list:
        return cls.save_all_process_models()

    @classmethod
    def add_unique_reference_cache_object(
        cls, reference_objects: dict[str, ReferenceCacheModel], reference_cache: ReferenceCacheModel
    ) -> None:
        reference_cache_unique = (
            f"{reference_cache.identifier}{reference_cache.relative_location}{reference_cache.type}"
        )
        reference_objects[reference_cache_unique] = reference_cache

    @classmethod
    def save_all_process_models(cls) -> list:
        """Build a cache of all processes, messages, correlation keys, and start events.
@@ -20,10 +32,13 @@
        from the database.
        """
        current_app.logger.debug("DataSetupService.save_all_process_models() start")

        failing_process_models = []
        process_models = ProcessModelService.get_process_models(recursive=True)
        reference_objects = {}
        for process_model in process_models:
        files = FileSystemService.walk_files_from_root_path(True, None)
        reference_objects: dict[str, ReferenceCacheModel] = {}
        for file in files:
            if FileSystemService.is_process_model_json_file(file):
                process_model = ProcessModelService.get_process_model_from_path(file)
                current_app.logger.debug(f"Process Model: {process_model.display_name}")
                try:
                    # FIXME: get_references_for_file_contents is erroring out for elements in the list
@@ -32,10 +47,7 @@
                    for ref in refs:
                        try:
                            reference_cache = ReferenceCacheModel.from_spec_reference(ref)
                            reference_cache_unique = (
                                f"{reference_cache.identifier}{reference_cache.relative_location}{reference_cache.type}"
                            )
                            reference_objects[reference_cache_unique] = reference_cache
                            cls.add_unique_reference_cache_object(reference_objects, reference_cache)
                            SpecFileService.update_caches_except_process(ref)
                            db.session.commit()
                        except Exception as ex:
@@ -52,6 +64,20 @@
                                    str(ex2),
                                )
                            )
            elif FileSystemService.is_data_store_json_file(file):
                relative_location = FileSystemService.relative_location(file)
                file_name = os.path.basename(file)
                (identifier, _) = os.path.splitext(file_name)
                reference_cache = ReferenceCacheModel.from_params(
                    identifier,
                    identifier,
                    "data_store",
                    file_name,
                    relative_location,
                    None,
                    False,
                )
                cls.add_unique_reference_cache_object(reference_objects, reference_cache)

        current_app.logger.debug("DataSetupService.save_all_process_models() end")
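
For reference, a minimal usage sketch, not part of the diff, of how this service is typically driven: run_setup() delegates to save_all_process_models(), whose list return value presumably carries the failing_process_models collected above. The caller below is hypothetical.

    # Hypothetical caller (illustration only): rebuild the reference cache from the file system.
    # Requires a Flask application context, since the service reads current_app.config.
    from spiffworkflow_backend.services.data_setup_service import DataSetupService

    failures = DataSetupService.run_setup()
    for failure in failures:
        print("failed to cache:", failure)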

file_system_service.py
@@ -1,5 +1,6 @@
import json
import os
from collections.abc import Callable
from collections.abc import Generator
from contextlib import contextmanager
from datetime import datetime
@@ -18,6 +19,11 @@ class ProcessModelFileNotFoundError(Exception):
    pass


DirectoryPredicate = Callable[[str, int], bool] | None
FilePredicate = Callable[[str], bool] | None
FileGenerator = Generator[str, None, None]


class FileSystemService:
    """Simple Service meant for extension that provides some useful
@@ -38,6 +44,45 @@ class FileSystemService:
        finally:
            os.chdir(prevdir)

    @classmethod
    def walk_files(
        cls, start_dir: str, directory_predicate: DirectoryPredicate, file_predicate: FilePredicate
    ) -> FileGenerator:
        depth = 0
        for root, subdirs, files in os.walk(start_dir):
            if directory_predicate:
                subdirs[:] = [dir for dir in subdirs if directory_predicate(dir, depth)]
            for f in files:
                file = os.path.join(root, f)
                if file_predicate and not file_predicate(file):
                    continue
                yield file
            depth += 1

    @classmethod
    def non_git_dir(cls, dirname: str, depth: int) -> bool:
        return dirname != ".git"

    @classmethod
    def not_recursive(cls, dirname: str, depth: int) -> bool:
        return depth == 0

    @classmethod
    def standard_directory_predicate(cls, recursive: bool) -> DirectoryPredicate:
        return cls.non_git_dir if recursive else cls.not_recursive

    @classmethod
    def is_process_model_json_file(cls, file: str) -> bool:
        return file.endswith(cls.PROCESS_MODEL_JSON_FILE)

    @classmethod
    def is_data_store_json_file(cls, file: str) -> bool:
        return file.endswith("_datastore.json")

    @classmethod
    def walk_files_from_root_path(cls, recursive: bool, file_predicate: FilePredicate) -> FileGenerator:
        yield from cls.walk_files(cls.root_path(), cls.standard_directory_predicate(recursive), file_predicate)

    @staticmethod
    def root_path() -> str:
        dir_name = current_app.config["SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"]
@@ -137,6 +182,10 @@ class FileSystemService:
        workflow_path = FileSystemService.process_model_full_path(process_model)
        return os.path.relpath(workflow_path, start=FileSystemService.root_path())

    @classmethod
    def relative_location(cls, path: str) -> str:
        return os.path.dirname(os.path.relpath(path, start=FileSystemService.root_path()))

    @staticmethod
    def process_group_path_for_spec(process_model: ProcessModelInfo) -> str:
        # os.path.split apparently returns 2 element tuple like: (first/path, last_item)
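
A short usage sketch, not part of the diff, for the new generator-based walker: the directory predicate prunes which subdirectories os.walk descends into (non_git_dir when recursive, not_recursive otherwise), and the file predicate filters the yielded paths. The start directory below is made up.

    # Illustration only: list process_model.json files under a hypothetical spec directory.
    from spiffworkflow_backend.services.file_system_service import FileSystemService

    for path in FileSystemService.walk_files(
        "/tmp/bpmn-specs",  # hypothetical start directory
        FileSystemService.standard_directory_predicate(recursive=True),  # skip .git directories
        FileSystemService.is_process_model_json_file,  # keep only process_model.json files
    ):
        print(path)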

process_model_service.py
@@ -2,7 +2,6 @@ import json
import os
import shutil
import uuid
from glob import glob
from json import JSONDecodeError
from typing import TypeVar
@@ -153,6 +152,11 @@ class ProcessModelService(FileSystemService):
        path = os.path.join(FileSystemService.root_path(), relative_path)
        return cls.__scan_process_model(path)

    @classmethod
    def get_process_model_from_path(cls, path: str) -> ProcessModelInfo:
        relative_path = os.path.relpath(path, start=FileSystemService.root_path())
        return cls.get_process_model_from_relative_path(os.path.dirname(relative_path))

    @classmethod
    def get_process_model(cls, process_model_id: str) -> ProcessModelInfo:
        """Get a process model from a model and group id.
@@ -180,13 +184,18 @@
            awesome_id = process_group_id.replace("/", os.sep)
            root_path = os.path.join(root_path, awesome_id)

        process_model_glob = os.path.join(root_path, "*", "process_model.json")
        if recursive:
            process_model_glob = os.path.join(root_path, "**", "process_model.json")
        if recursive is None:
            recursive = False

        process_model_files = FileSystemService.walk_files(
            root_path,
            FileSystemService.standard_directory_predicate(recursive),
            FileSystemService.is_process_model_json_file,
        )

        for file in process_model_files:
            process_model = cls.get_process_model_from_path(file)

        for file in glob(process_model_glob, recursive=True):
            process_model_relative_path = os.path.relpath(file, start=FileSystemService.root_path())
            process_model = cls.get_process_model_from_relative_path(os.path.dirname(process_model_relative_path))
            if include_files:
                files = FileSystemService.get_sorted_files(process_model)
                for f in files:
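
A brief sketch, not part of the diff, of the listing call after this change: get_process_models now walks the spec root via FileSystemService.walk_files instead of glob, and coerces recursive=None to False. Only the recursive keyword and display_name attribute are taken from the code above; the loop below is illustrative.

    # Illustration only: enumerate all process models under the configured spec root.
    # Requires a Flask application context, since root_path() reads current_app.config.
    from spiffworkflow_backend.services.process_model_service import ProcessModelService

    for process_model in ProcessModelService.get_process_models(recursive=True):
        print(process_model.display_name)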