Only show data stores from upsearch in the properties panel (#1116)

jbirddog 2024-02-27 18:17:14 -05:00 committed by GitHub
parent 78be11d309
commit d66ea0e9ac
8 changed files with 40 additions and 16 deletions

Makefile

@@ -41,6 +41,9 @@ stop-dev:
 be-clear-log-file:
 	$(IN_BACKEND) rm -f log/unit_testing.log

+be-logs:
+	docker logs -f $(BACKEND_CONTAINER)
+
 be-mypy:
 	$(IN_BACKEND) poetry run mypy src tests
@@ -62,6 +65,9 @@ be-tests-par: be-clear-log-file
 fe-lint-fix:
 	$(IN_FRONTEND) npm run lint:fix

+fe-logs:
+	docker logs -f $(FRONTEND_CONTAINER)
+
 fe-npm-i:
 	$(IN_FRONTEND) npm i
@@ -82,7 +88,7 @@ take-ownership:
 .PHONY: build-images dev-env \
 	start-dev stop-dev \
-	be-clear-log-file be-recreate-db be-ruff be-sh be-tests be-tests-par \
-	fe-lint-fix fe-npm-i fe-sh \
+	be-clear-log-file be-logs be-recreate-db be-ruff be-sh be-tests be-tests-par \
+	fe-lint-fix fe-logs fe-npm-i fe-sh \
 	pre-commit run-pyl \
 	take-ownership

Backend OpenAPI specification

@@ -2785,6 +2785,12 @@ paths:
         description: Optional parameter to filter by a single group
         schema:
           type: string
+      - name: upsearch
+        in: query
+        required: false
+        description: Optional parameter to indicate if an upsearch should be performed
+        schema:
+          type: boolean
       - name: page
         in: query
         required: false
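Taken together with the controller change below, the new parameter can be exercised with a plain HTTP call. A hypothetical sketch in Python using requests; the host, API prefix, and group identifier are placeholders, not values from this commit:

import requests

# Placeholder host, prefix, and group id, for illustration only.
response = requests.get(
    "https://spiffworkflow.example.com/v1.0/data-stores",
    params={
        "process_group_identifier": "misc/group_a",  # made-up group
        "upsearch": "true",  # new flag: also search ancestor groups
    },
)
print(response.status_code)  # expect 200
print(response.json())  # list of dicts: name, type, id, clz, location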

DataStoreCRUD base class

@@ -24,7 +24,7 @@ class DataStoreCRUD:
         raise Exception("must implement")

     @staticmethod
-    def existing_data_stores(process_group_identifier: str | None = None) -> list[dict[str, Any]]:
+    def existing_data_stores(process_group_identifiers: list[str] | None = None) -> list[dict[str, Any]]:
         raise Exception("must implement")

     @staticmethod
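Since every concrete store must now accept a list of locations rather than a single one, a hypothetical subclass of the updated interface would look like the following; ExampleDataStore and its hard-coded entries are invented for illustration, and only the base-class stub from the hunk above is real:

from typing import Any


class DataStoreCRUD:
    # Stub of the base class from the hunk above.
    @staticmethod
    def existing_data_stores(process_group_identifiers: list[str] | None = None) -> list[dict[str, Any]]:
        raise Exception("must implement")


class ExampleDataStore(DataStoreCRUD):
    # Hypothetical subclass, not part of this commit.
    @staticmethod
    def existing_data_stores(process_group_identifiers: list[str] | None = None) -> list[dict[str, Any]]:
        stores = [
            {"name": "a", "type": "example", "id": "a", "location": "misc"},
            {"name": "b", "type": "example", "id": "b", "location": "other"},
        ]
        if process_group_identifiers:
            # Keep only stores located in one of the upsearched groups.
            return [s for s in stores if s["location"] in process_group_identifiers]
        return stores


print(ExampleDataStore.existing_data_stores(["misc"]))  # [{'name': 'a', ...}]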

JSONDataStore

@@ -30,12 +30,12 @@ class JSONDataStore(BpmnDataStoreSpecification, DataStoreCRUD):  # type: ignore
         return db.session.query(JSONDataStoreModel).filter_by(identifier=identifier, location=location).first()

     @staticmethod
-    def existing_data_stores(process_group_identifier: str | None = None) -> list[dict[str, Any]]:
+    def existing_data_stores(process_group_identifiers: list[str] | None = None) -> list[dict[str, Any]]:
         data_stores = []
         query = db.session.query(JSONDataStoreModel.name, JSONDataStoreModel.identifier, JSONDataStoreModel.location)
-        if process_group_identifier is not None:
-            query = query.filter_by(location=process_group_identifier)
+        if process_group_identifiers:
+            query = query.filter(JSONDataStoreModel.location.in_(process_group_identifiers))  # type: ignore
         keys = query.order_by(JSONDataStoreModel.name).all()
         for key in keys:
             data_stores.append({"name": key[0], "type": "json", "id": key[1], "clz": "JSONDataStore", "location": key[2]})

KKVDataStore

@@ -28,12 +28,12 @@ class KKVDataStore(BpmnDataStoreSpecification, DataStoreCRUD):  # type: ignore
         return db.session.query(KKVDataStoreModel).filter_by(identifier=identifier, location=location).first()

     @staticmethod
-    def existing_data_stores(process_group_identifier: str | None = None) -> list[dict[str, Any]]:
+    def existing_data_stores(process_group_identifiers: list[str] | None = None) -> list[dict[str, Any]]:
         data_stores = []
         query = db.session.query(KKVDataStoreModel)
-        if process_group_identifier is not None:
-            query = query.filter_by(location=process_group_identifier)
+        if process_group_identifiers:
+            query = query.filter(KKVDataStoreModel.location.in_(process_group_identifiers))  # type: ignore
         models = query.order_by(KKVDataStoreModel.name).all()
         for model in models:
             data_stores.append(

TypeaheadDataStore

@@ -14,10 +14,10 @@ class TypeaheadDataStore(BpmnDataStoreSpecification, DataStoreCRUD):  # type: ignore
     """TypeaheadDataStore."""

     @staticmethod
-    def existing_data_stores(process_group_identifier: str | None = None) -> list[dict[str, Any]]:
+    def existing_data_stores(process_group_identifiers: list[str] | None = None) -> list[dict[str, Any]]:
         data_stores: list[dict[str, Any]] = []

-        if process_group_identifier is not None:
+        if process_group_identifiers:
             # temporary until this data store gets location support
             return data_stores

Data store API controller

@@ -13,6 +13,7 @@ from spiffworkflow_backend.exceptions.api_error import ApiError
 from spiffworkflow_backend.models.db import db
 from spiffworkflow_backend.routes.process_api_blueprint import _commit_and_push_to_git
 from spiffworkflow_backend.services.process_model_service import ProcessModelService
+from spiffworkflow_backend.services.upsearch_service import UpsearchService

 DATA_STORES = {
     "json": (JSONDataStore, "JSON Data Store"),
@@ -21,15 +22,24 @@ DATA_STORES = {
 }

-def data_store_list(process_group_identifier: str | None = None, page: int = 1, per_page: int = 100) -> flask.wrappers.Response:
+def data_store_list(
+    process_group_identifier: str | None = None, upsearch: bool = False, page: int = 1, per_page: int = 100
+) -> flask.wrappers.Response:
     """Returns a list of the names of all the data stores."""
     data_stores = []
+    locations_to_upsearch = []
+
+    if process_group_identifier is not None:
+        if upsearch:
+            locations_to_upsearch = UpsearchService.upsearch_locations(process_group_identifier)
+        else:
+            locations_to_upsearch.append(process_group_identifier)

     # Right now the only data stores we support are type ahead, kkv, json
-    data_stores.extend(JSONDataStore.existing_data_stores(process_group_identifier))
-    data_stores.extend(TypeaheadDataStore.existing_data_stores(process_group_identifier))
-    data_stores.extend(KKVDataStore.existing_data_stores(process_group_identifier))
+    data_stores.extend(JSONDataStore.existing_data_stores(locations_to_upsearch))
+    data_stores.extend(TypeaheadDataStore.existing_data_stores(locations_to_upsearch))
+    data_stores.extend(KKVDataStore.existing_data_stores(locations_to_upsearch))

     return make_response(jsonify(data_stores), 200)
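UpsearchService.upsearch_locations is imported here but its implementation is not part of this diff. A plausible sketch of the semantics, assuming group identifiers are slash-delimited paths; the exact behavior, including whether a root-level entry is also returned, is an assumption rather than something this commit shows:

def upsearch_locations(process_group_identifier: str) -> list[str]:
    # Hypothetical reimplementation for illustration: walk from the given
    # group up through each ancestor group.
    locations = []
    current = process_group_identifier
    while current:
        locations.append(current)
        current = current.rpartition("/")[0]  # drop the last path segment
    return locations


print(upsearch_locations("misc/group_a/group_b"))
# ['misc/group_a/group_b', 'misc/group_a', 'misc']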

ProcessModelEditDiagram (frontend)

@@ -380,8 +380,10 @@ export default function ProcessModelEditDiagram() {
   };

   const onDataStoresRequested = (event: any) => {
+    const processGroupIdentifier =
+      processModel?.parent_groups?.slice(-1).pop()?.id ?? '';
     HttpService.makeCallToBackend({
-      path: `/data-stores`,
+      path: `/data-stores?upsearch=true&process_group_identifier=${processGroupIdentifier}`,
       successCallback: makeDataStoresApiHandler(event),
     });
   };