commit the repo when a data store is added (#1115)
Co-authored-by: jasquat <jasquat@users.noreply.github.com>
parent 9d8a7a7e47
commit b0dbdfb192
@@ -182,7 +182,7 @@ explicit_package_bases = false
 # solution was https://www.reddit.com/r/neovim/comments/11k5but/comment/jbjwwtf in vim settings
 
 [tool.ruff]
-select = [
+lint.select = [
     # "ANN", # flake8-annotations
     "ASYNC", # flake8-async
     "B", # flake8-bugbear
@@ -203,7 +203,7 @@ select = [
     "YTT", # flake8-2020
 ]
 
-ignore = [
+lint.ignore = [
     "C901", # "complexity" category
     "PLR", # "refactoring" category has "too many lines in method" type stuff
     "PLC1901",
@@ -219,12 +219,12 @@ exclude = [
     "migrations"
 ]
 
-[tool.ruff.per-file-ignores]
+[tool.ruff.lint.per-file-ignores]
 "migrations/versions/*.py" = ["E501"]
 "tests/**/*.py" = ["PLR2004", "S101"] # PLR2004 is about magic vars, S101 allows assert
 "bin/*.py" = ["T"] # it's ok to print things in scripts
 
-[tool.ruff.isort]
+[tool.ruff.lint.isort]
 force-single-line = true
 
 # pip install fixit && fixit fix -a src
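Note: the pyproject.toml hunks above track Ruff's configuration migration. As of Ruff v0.2.0, lint-specific settings such as select, ignore, per-file-ignores, and isort are namespaced under lint., and the old top-level keys are deprecated. The dotted-key spelling used in the diff is equivalent to dedicated tables; a minimal sketch of the same settings in table form (rule lists abbreviated, full lists as in the diff):

    [tool.ruff.lint]
    select = ["ASYNC", "B"]
    ignore = ["C901", "PLR", "PLC1901"]

    [tool.ruff.lint.isort]
    force-single-line = true

Running `ruff check .` after the rename should behave the same as before, minus the deprecation warnings about top-level lint settings.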
@@ -1,8 +1,8 @@
-"""APIs for dealing with process groups, process models, and process instances."""
 import json
 from typing import Any
 
 import flask.wrappers
+from flask import g
 from flask import jsonify
 from flask import make_response
 
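The new `from flask import g` import supports the git commit message built later in this diff, which reads `g.user.username`. flask.g is Flask's per-request context object; the backend's authentication layer evidently stashes the current user there before handlers run. A generic sketch of that pattern, not this repo's actual auth code (the User stub and the hook are invented for illustration; imports follow the repo's force-single-line isort style):

    from dataclasses import dataclass

    from flask import Flask
    from flask import g

    app = Flask(__name__)


    @dataclass
    class User:  # stand-in for the backend's real user model
        username: str


    @app.before_request
    def load_user() -> None:
        # A real auth hook would decode a session or token; a stub user suffices here.
        g.user = User(username="example")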
@@ -11,6 +11,7 @@ from spiffworkflow_backend.data_stores.kkv import KKVDataStore
 from spiffworkflow_backend.data_stores.typeahead import TypeaheadDataStore
 from spiffworkflow_backend.exceptions.api_error import ApiError
 from spiffworkflow_backend.models.db import db
+from spiffworkflow_backend.routes.process_api_blueprint import _commit_and_push_to_git
 from spiffworkflow_backend.services.process_model_service import ProcessModelService
 
 DATA_STORES = {
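For orientation: `_data_store_upsert` below dispatches through the DATA_STORES registry whose opening line ends this hunk. The unpacking `data_store_class, _ = DATA_STORES[data_store_type]` implies each value is a tuple whose first element is a data store class. A sketch of that shape, relying on the data store classes imported above — the string keys and the second tuple elements are assumptions for illustration, not taken from the diff:

    DATA_STORES = {
        "json": (JSONDataStore, "JSON Data Store"),  # assumed key and label
        "kkv": (KKVDataStore, "KKV Data Store"),  # assumed key and label
        "typeahead": (TypeaheadDataStore, "Typeahead Data Store"),  # assumed key and label
    }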
@@ -115,36 +116,39 @@ def _data_store_upsert(body: dict, insert: bool) -> flask.wrappers.Response:
     data_store_class, _ = DATA_STORES[data_store_type]
 
     if insert:
-        model = data_store_class.create_instance(identifier, location)
+        data_store_model = data_store_class.create_instance(identifier, location)
     else:
-        model = data_store_class.existing_instance(identifier, location)
+        data_store_model = data_store_class.existing_instance(identifier, location)
 
-    model.name = name
-    model.schema = schema
-    model.description = description or ""
+    data_store_model.name = name
+    data_store_model.schema = schema
+    data_store_model.description = description or ""
 
-    _write_specification_to_process_group(data_store_type, model)
+    _write_specification_to_process_group(data_store_type, data_store_model)
 
-    db.session.add(model)
+    db.session.add(data_store_model)
     db.session.commit()
 
+    _commit_and_push_to_git(f"User: {g.user.username} added data store {data_store_model.identifier}")
     return make_response(jsonify({"ok": True}), 200)
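The added `_commit_and_push_to_git` call is the point of this commit: the upsert now writes a specification into a process group on disk (see `_write_specification_to_process_group` below), so the repository backing the process models must be committed afterwards, as the process-model endpoints already do. The helper's body is not part of this diff; a rough sketch of the idea, assuming a plain git checkout (the function below is a hypothetical stand-in, not the real implementation imported from process_api_blueprint):

    import subprocess


    def commit_and_push_to_git(message: str, repo_dir: str) -> None:
        # Stage everything, commit with the supplied message, then push.
        subprocess.run(["git", "-C", repo_dir, "add", "--all"], check=True)
        subprocess.run(["git", "-C", repo_dir, "commit", "-m", message], check=True)
        subprocess.run(["git", "-C", repo_dir, "push"], check=True)

The real helper presumably also honors the backend's git settings (for example, skipping the push when git integration is disabled); the sketch omits any such guard.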
 
 
-def _write_specification_to_process_group(data_store_type: str, model: Any) -> None:
+def _write_specification_to_process_group(
+    data_store_type: str, data_store_model: JSONDataStore | KKVDataStore | TypeaheadDataStore
+) -> None:
     process_group = ProcessModelService.get_process_group(
-        model.location, find_direct_nested_items=False, find_all_nested_items=False, create_if_not_exists=True
+        data_store_model.location, find_direct_nested_items=False, find_all_nested_items=False, create_if_not_exists=True
     )
 
     if data_store_type not in process_group.data_store_specifications:
         process_group.data_store_specifications[data_store_type] = {}
 
-    process_group.data_store_specifications[data_store_type][model.identifier] = {
-        "name": model.name,
-        "identifier": model.identifier,
-        "location": model.location,
-        "schema": model.schema,
-        "description": model.description,
+    process_group.data_store_specifications[data_store_type][data_store_model.identifier] = {
+        "name": data_store_model.name,
+        "identifier": data_store_model.identifier,
+        "location": data_store_model.location,
+        "schema": data_store_model.schema,
+        "description": data_store_model.description,
     }
 
     ProcessModelService.update_process_group(process_group)
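Net effect of `_write_specification_to_process_group`: the process group ends up with a nested type → identifier → specification mapping, which `ProcessModelService.update_process_group` persists and the new `_commit_and_push_to_git` call then commits. An illustrative example of the resulting structure — the field names come from the code above, the concrete values are made up:

    data_store_specifications = {
        "kkv": {  # data_store_type
            "my_store": {  # data_store_model.identifier
                "name": "My Store",
                "identifier": "my_store",
                "location": "some/process/group",
                "schema": {},  # whatever schema the request body supplied
                "description": "",
            },
        },
    }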