commit the repo when a data store is added (#1115)

Co-authored-by: jasquat <jasquat@users.noreply.github.com>
commit b0dbdfb192 (parent 9d8a7a7e47)
Author: jasquat
Date: 2024-02-27 11:50:17 -05:00 (committed by GitHub)
2 changed files with 24 additions and 20 deletions


@@ -182,7 +182,7 @@ explicit_package_bases = false
 # solution was https://www.reddit.com/r/neovim/comments/11k5but/comment/jbjwwtf in vim settings
 [tool.ruff]
-select = [
+lint.select = [
 # "ANN", # flake8-annotations
 "ASYNC", # flake8-async
 "B", # flake8-bugbear
@@ -203,7 +203,7 @@ select = [
 "YTT", # flake8-2020
 ]
-ignore = [
+lint.ignore = [
 "C901", # "complexity" category
 "PLR", # "refactoring" category has "too many lines in method" type stuff
 "PLC1901",
@@ -219,12 +219,12 @@ exclude = [
 "migrations"
 ]
-[tool.ruff.per-file-ignores]
+[tool.ruff.lint.per-file-ignores]
 "migrations/versions/*.py" = ["E501"]
 "tests/**/*.py" = ["PLR2004", "S101"] # PLR2004 is about magic vars, S101 allows assert
 "bin/*.py" = ["T"] # it's ok to print things in scripts
-[tool.ruff.isort]
+[tool.ruff.lint.isort]
 force-single-line = true
 # pip install fixit && fixit fix -a src


@@ -1,8 +1,8 @@
 """APIs for dealing with process groups, process models, and process instances."""
 import json
-from typing import Any
 import flask.wrappers
+from flask import g
 from flask import jsonify
 from flask import make_response
@@ -11,6 +11,7 @@ from spiffworkflow_backend.data_stores.kkv import KKVDataStore
 from spiffworkflow_backend.data_stores.typeahead import TypeaheadDataStore
 from spiffworkflow_backend.exceptions.api_error import ApiError
 from spiffworkflow_backend.models.db import db
+from spiffworkflow_backend.routes.process_api_blueprint import _commit_and_push_to_git
 from spiffworkflow_backend.services.process_model_service import ProcessModelService
 DATA_STORES = {
@@ -115,36 +116,39 @@ def _data_store_upsert(body: dict, insert: bool) -> flask.wrappers.Response:
     data_store_class, _ = DATA_STORES[data_store_type]
     if insert:
-        model = data_store_class.create_instance(identifier, location)
+        data_store_model = data_store_class.create_instance(identifier, location)
     else:
-        model = data_store_class.existing_instance(identifier, location)
+        data_store_model = data_store_class.existing_instance(identifier, location)
-    model.name = name
-    model.schema = schema
-    model.description = description or ""
+    data_store_model.name = name
+    data_store_model.schema = schema
+    data_store_model.description = description or ""
-    _write_specification_to_process_group(data_store_type, model)
+    _write_specification_to_process_group(data_store_type, data_store_model)
-    db.session.add(model)
+    db.session.add(data_store_model)
     db.session.commit()
+    _commit_and_push_to_git(f"User: {g.user.username} added data store {data_store_model.identifier}")
     return make_response(jsonify({"ok": True}), 200)
-def _write_specification_to_process_group(data_store_type: str, model: Any) -> None:
+def _write_specification_to_process_group(
+    data_store_type: str, data_store_model: JSONDataStore | KKVDataStore | TypeaheadDataStore
+) -> None:
     process_group = ProcessModelService.get_process_group(
-        model.location, find_direct_nested_items=False, find_all_nested_items=False, create_if_not_exists=True
+        data_store_model.location, find_direct_nested_items=False, find_all_nested_items=False, create_if_not_exists=True
     )
     if data_store_type not in process_group.data_store_specifications:
         process_group.data_store_specifications[data_store_type] = {}
-    process_group.data_store_specifications[data_store_type][model.identifier] = {
-        "name": model.name,
-        "identifier": model.identifier,
-        "location": model.location,
-        "schema": model.schema,
-        "description": model.description,
+    process_group.data_store_specifications[data_store_type][data_store_model.identifier] = {
+        "name": data_store_model.name,
+        "identifier": data_store_model.identifier,
+        "location": data_store_model.location,
+        "schema": data_store_model.schema,
+        "description": data_store_model.description,
     }
     ProcessModelService.update_process_group(process_group)
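
The effect of the new _commit_and_push_to_git call is that the process model repository gets a git commit (and push) whenever a data store is created or updated, in addition to the database commit. The helper itself is imported from process_api_blueprint and is not shown in this diff; the sketch below is only a rough illustration of what such a helper could do, assuming the process model files live in a local git checkout and git is driven via subprocess. The function name and the repo_dir parameter are illustrative assumptions, not the backend's actual API.

# Rough sketch only; the real _commit_and_push_to_git in spiffworkflow-backend
# may use a dedicated git service and different configuration.
import subprocess

def commit_and_push_to_git(message: str, repo_dir: str = "process_models") -> None:
    """Stage all changes in the process model checkout, commit them, and push."""
    subprocess.run(["git", "-C", repo_dir, "add", "--all"], check=True)
    # "git commit" exits non-zero when there is nothing to commit; skip the push in that case.
    commit = subprocess.run(["git", "-C", repo_dir, "commit", "-m", message])
    if commit.returncode == 0:
        subprocess.run(["git", "-C", repo_dir, "push"], check=True)

# Example usage, mirroring the message format used in the diff above:
# commit_and_push_to_git("User: alice added data store my_data_store")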