diff --git a/spiffworkflow-backend/migrations/versions/12a8864399d4_.py b/spiffworkflow-backend/migrations/versions/12a8864399d4_.py
new file mode 100644
index 000000000..de4708db2
--- /dev/null
+++ b/spiffworkflow-backend/migrations/versions/12a8864399d4_.py
@@ -0,0 +1,45 @@
+"""empty message
+
+Revision ID: 12a8864399d4
+Revises: bc2b84d013e0
+Create Date: 2023-12-19 08:07:12.265442
+
+"""
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import mysql
+
+# revision identifiers, used by Alembic.
+revision = '12a8864399d4'
+down_revision = 'bc2b84d013e0'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table('json_data_store', schema=None) as batch_op:
+        batch_op.drop_index('ix_json_data_store_name')
+
+    op.drop_table('json_data_store')
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('json_data_store',
+    sa.Column('id', mysql.INTEGER(), autoincrement=True, nullable=False),
+    sa.Column('name', mysql.VARCHAR(length=255), nullable=True),
+    sa.Column('location', mysql.VARCHAR(length=255), nullable=True),
+    sa.Column('data', mysql.JSON(), nullable=True),
+    sa.Column('updated_at_in_seconds', mysql.INTEGER(), autoincrement=False, nullable=True),
+    sa.Column('created_at_in_seconds', mysql.INTEGER(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id'),
+    mysql_collate='utf8mb4_0900_ai_ci',
+    mysql_default_charset='utf8mb4',
+    mysql_engine='InnoDB'
+    )
+    with op.batch_alter_table('json_data_store', schema=None) as batch_op:
+        batch_op.create_index('ix_json_data_store_name', ['name'], unique=False)
+
+    # ### end Alembic commands ###
diff --git a/spiffworkflow-backend/migrations/versions/a872f8f2e909_.py b/spiffworkflow-backend/migrations/versions/a872f8f2e909_.py
new file mode 100644
index 000000000..88c39210e
--- /dev/null
+++ b/spiffworkflow-backend/migrations/versions/a872f8f2e909_.py
@@ -0,0 +1,48 @@
+"""empty message
+
+Revision ID: a872f8f2e909
+Revises: 12a8864399d4
+Create Date: 2023-12-19 08:40:26.572613
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = 'a872f8f2e909'
+down_revision = '12a8864399d4'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('json_data_store',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('name', sa.String(length=255), nullable=False),
+    sa.Column('identifier', sa.String(length=255), nullable=False),
+    sa.Column('location', sa.String(length=255), nullable=False),
+    sa.Column('schema', sa.JSON(), nullable=False),
+    sa.Column('data', sa.JSON(), nullable=False),
+    sa.Column('description', sa.String(length=255), nullable=True),
+    sa.Column('updated_at_in_seconds', sa.Integer(), nullable=False),
+    sa.Column('created_at_in_seconds', sa.Integer(), nullable=False),
+    sa.PrimaryKeyConstraint('id'),
+    sa.UniqueConstraint('identifier', 'location', name='_identifier_location_unique')
+    )
+    with op.batch_alter_table('json_data_store', schema=None) as batch_op:
+        batch_op.create_index(batch_op.f('ix_json_data_store_identifier'), ['identifier'], unique=False)
+        batch_op.create_index(batch_op.f('ix_json_data_store_name'), ['name'], unique=False)
+
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table('json_data_store', schema=None) as batch_op:
+        batch_op.drop_index(batch_op.f('ix_json_data_store_name'))
+        batch_op.drop_index(batch_op.f('ix_json_data_store_identifier'))
+
+    op.drop_table('json_data_store')
+    # ### end Alembic commands ###
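Note: taken together, these two migrations drop and rebuild `json_data_store` rather than altering it in place, so any rows in the old table are discarded on upgrade. A sketch of the revision chain, assuming the usual Flask-Migrate workflow this backend uses:

```python
# Revision chain introduced by this PR (oldest -> newest):
#   bc2b84d013e0 -> 12a8864399d4   # drops the old json_data_store table
#   12a8864399d4 -> a872f8f2e909   # recreates it with identifier/schema/description
#
# "flask db upgrade" applies both in order; downgrading restores the old
# MySQL-flavored table definition, but not the data it previously held.
```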
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml
index f40d31eba..e9f50cad8 100755
--- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml
@@ -2770,6 +2770,28 @@ paths:
       responses:
         "200":
           description: The list of currently defined data store objects
+    post:
+      operationId: spiffworkflow_backend.routes.data_store_controller.data_store_create
+      summary: Create a new data store instance.
+      requestBody:
+        content:
+          application/json:
+            schema:
+              $ref: "#/components/schemas/DataStore"
+      tags:
+        - Data Stores
+      responses:
+        "200":
+          description: The newly created data store instance
+  /data-stores/types:
+    get:
+      operationId: spiffworkflow_backend.routes.data_store_controller.data_store_types
+      summary: Return a list of the data store types.
+      tags:
+        - Data Stores
+      responses:
+        "200":
+          description: The list of currently defined data store types
   /data-stores/{data_store_type}/{name}:
     parameters:
       - name: data_store_type
@@ -2998,31 +3020,22 @@ components:
     DataStore:
       properties:
         id:
-          type: integer
-          example: 1234
-        key:
           type: string
-          example: MyKey
-        workflow_id:
-          type: integer
-          x-nullable: true
-          example: 12
-        user_id:
+          example: employees
+        name:
+          type: string
+          example: Employees DataStore
+        type:
+          type: string
+          example: TypeaheadDataStore
+        description:
           type: string
           x-nullable: true
-          example: dhf8r
-        task_id:
+          example: This data store contains all the employees
+        parent_group_id:
           type: string
           x-nullable: true
-          example: My Spec Name
-        value:
-          type: string
-          x-nullable: true
-          example: Some Value
+          example: Optional parent group id to specify the location of this data store
     Process:
       properties:
         identifier:
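For reference, a minimal sketch of exercising the new create endpoint. The host, port, `/v1.0` prefix, and lack of authentication are illustrative assumptions for a local dev setup; note that the body's `id` field carries the identifier and `schema` travels as a JSON-encoded string, which the controller shown later parses with `json.loads()`:

```python
import json

import requests  # assumed available; any HTTP client works

payload = {
    "type": "json",
    "name": "Employees DataStore",
    "id": "employees",                         # identifier; must satisfy the frontend's id regex
    "location": "misc/jonjon",                 # parent group id, "/" for the root
    "description": "This data store contains all the employees",
    "schema": json.dumps({"type": "object"}),  # sent as a string; the backend parses it
}

response = requests.post("http://localhost:7000/v1.0/data-stores", json=payload)
assert response.json() == {"ok": True}
```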
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/data_stores/crud.py b/spiffworkflow-backend/src/spiffworkflow_backend/data_stores/crud.py
index 17b1e06a5..c014545ba 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/data_stores/crud.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/data_stores/crud.py
@@ -2,6 +2,10 @@ from typing import Any
 
 
 class DataStoreCRUD:
+    @staticmethod
+    def create_instance(name: str, identifier: str, location: str, schema: dict[str, Any], description: str | None) -> None:
+        raise Exception("must implement")
+
     @staticmethod
     def existing_data_stores() -> list[dict[str, Any]]:
         raise Exception("must implement")
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/data_stores/json.py b/spiffworkflow-backend/src/spiffworkflow_backend/data_stores/json.py
index a398f3a0c..f00354093 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/data_stores/json.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/data_stores/json.py
@@ -10,6 +10,15 @@ from spiffworkflow_backend.models.db import db
 from spiffworkflow_backend.models.json_data_store import JSONDataStoreModel
 from spiffworkflow_backend.services.file_system_service import FileSystemService
 from spiffworkflow_backend.services.reference_cache_service import ReferenceCacheService
+from spiffworkflow_backend.services.upsearch_service import UpsearchService
+
+
+class DataStoreReadError(Exception):
+    pass
+
+
+class DataStoreWriteError(Exception):
+    pass
 
 
 def _process_model_location_for_task(spiff_task: SpiffTask) -> str | None:
@@ -19,29 +28,22 @@ def _process_model_location_for_task(spiff_task: SpiffTask) -> str | None:
     return None
 
 
-def _data_store_filename(name: str) -> str:
-    return f"{name}.json"
-
-
-def _data_store_exists_at_location(location: str, name: str) -> bool:
-    return FileSystemService.file_exists_at_relative_path(location, _data_store_filename(name))
-
-
-def _data_store_location_for_task(spiff_task: SpiffTask, name: str) -> str | None:
-    location = _process_model_location_for_task(spiff_task)
-    if location is None:
-        return None
-    if _data_store_exists_at_location(location, name):
-        return location
-    location = ReferenceCacheService.upsearch(location, name, "data_store")
-    if location is None or not _data_store_exists_at_location(location, name):
-        return None
-    return location
-
-
 class JSONDataStore(BpmnDataStoreSpecification, DataStoreCRUD):  # type: ignore
     """JSONDataStore."""
 
+    @staticmethod
+    def create_instance(name: str, identifier: str, location: str, schema: dict[str, Any], description: str | None) -> None:
+        model = JSONDataStoreModel(
+            name=name,
+            identifier=identifier,
+            location=location,
+            schema=schema,
+            description=description or "",
+            data={},
+        )
+        db.session.add(model)
+        db.session.commit()
+
     @staticmethod
     def existing_data_stores() -> list[dict[str, Any]]:
         data_stores = []
@@ -58,35 +60,55 @@ class JSONDataStore(BpmnDataStoreSpecification, DataStoreCRUD):  # type: ignore
 
     @staticmethod
     def build_response_item(model: Any) -> dict[str, Any]:
-        return {"location": model.location, "data": model.data}
+        return {"location": model.location, "identifier": model.identifier, "data": model.data}
 
     def get(self, my_task: SpiffTask) -> None:
         """get."""
         model: JSONDataStoreModel | None = None
-        location = _data_store_location_for_task(my_task, self.bpmn_id)
+        location = self._data_store_location_for_task(my_task, self.bpmn_id)
         if location is not None:
-            model = db.session.query(JSONDataStoreModel).filter_by(name=self.bpmn_id, location=location).first()
+            model = db.session.query(JSONDataStoreModel).filter_by(identifier=self.bpmn_id, location=location).first()
         if model is None:
-            raise Exception(f"Unable to read from data store '{self.bpmn_id}' using location '{location}'.")
+            raise DataStoreReadError(f"Unable to read from data store '{self.bpmn_id}' using location '{location}'.")
         my_task.data[self.bpmn_id] = model.data
 
     def set(self, my_task: SpiffTask) -> None:
         """set."""
-        location = _data_store_location_for_task(my_task, self.bpmn_id)
-        if location is None:
-            raise Exception(f"Unable to write to data store '{self.bpmn_id}' using location '{location}'.")
-        data = my_task.data[self.bpmn_id]
-        model = JSONDataStoreModel(
-            name=self.bpmn_id,
-            location=location,
-            data=data,
-        )
+        model: JSONDataStoreModel | None = None
+        location = self._data_store_location_for_task(my_task, self.bpmn_id)
+
+        if location is not None:
+            model = JSONDataStoreModel.query.filter_by(identifier=self.bpmn_id, location=location).first()
+        if location is None or model is None:
+            raise DataStoreWriteError(f"Unable to write to data store '{self.bpmn_id}' using location '{location}'.")
+
+        data = my_task.data[self.bpmn_id]
+
+        # TODO: validate data against schema
+        model.data = data
 
-        db.session.query(JSONDataStoreModel).filter_by(name=self.bpmn_id, location=location).delete()
         db.session.add(model)
         db.session.commit()
         del my_task.data[self.bpmn_id]
 
+    def _data_store_location_for_task(self, spiff_task: SpiffTask, identifier: str) -> str | None:
+        location = _process_model_location_for_task(spiff_task)
+        if location is None:
+            return None
+
+        locations = UpsearchService.upsearch_locations(location)
+        model = (
+            JSONDataStoreModel.query.filter_by(identifier=identifier)
+            .filter(JSONDataStoreModel.location.in_(locations))  # type: ignore
+            .order_by(JSONDataStoreModel.location.desc())  # type: ignore
+            .first()
+        )
+
+        if model is None:
+            return None
+
+        return model.location  # type: ignore
+
     @staticmethod
     def register_data_store_class(data_store_classes: dict[str, Any]) -> None:
         data_store_classes["JSONDataStore"] = JSONDataStore
@@ -114,21 +136,40 @@ class JSONFileDataStore(BpmnDataStoreSpecification):  # type: ignore
 
     def get(self, my_task: SpiffTask) -> None:
         """get."""
-        location = _data_store_location_for_task(my_task, self.bpmn_id)
+        location = self._data_store_location_for_task(my_task, self.bpmn_id)
         if location is None:
-            raise Exception(f"Unable to read from data store '{self.bpmn_id}' using location '{location}'.")
-        contents = FileSystemService.contents_of_json_file_at_relative_path(location, _data_store_filename(self.bpmn_id))
+            raise DataStoreReadError(f"Unable to read from data store '{self.bpmn_id}' using location '{location}'.")
+        contents = FileSystemService.contents_of_json_file_at_relative_path(location, self._data_store_filename(self.bpmn_id))
         my_task.data[self.bpmn_id] = contents
 
     def set(self, my_task: SpiffTask) -> None:
         """set."""
-        location = _data_store_location_for_task(my_task, self.bpmn_id)
+        location = self._data_store_location_for_task(my_task, self.bpmn_id)
         if location is None:
-            raise Exception(f"Unable to write to data store '{self.bpmn_id}' using location '{location}'.")
+            raise DataStoreWriteError(f"Unable to write to data store '{self.bpmn_id}' using location '{location}'.")
         data = my_task.data[self.bpmn_id]
-        FileSystemService.write_to_json_file_at_relative_path(location, _data_store_filename(self.bpmn_id), data)
+        FileSystemService.write_to_json_file_at_relative_path(location, self._data_store_filename(self.bpmn_id), data)
         del my_task.data[self.bpmn_id]
 
+    def _data_store_location_for_task(self, spiff_task: SpiffTask, identifier: str) -> str | None:
+        location = _process_model_location_for_task(spiff_task)
+        if location is None:
+            return None
+        if self._data_store_exists_at_location(location, identifier):
+            return location
+        location = ReferenceCacheService.upsearch(location, identifier, "data_store")
+        if location is None:
+            return None
+        if not self._data_store_exists_at_location(location, identifier):
+            return None
+        return location
+
+    def _data_store_exists_at_location(self, location: str, identifier: str) -> bool:
+        return FileSystemService.file_exists_at_relative_path(location, self._data_store_filename(identifier))
+
+    def _data_store_filename(self, name: str) -> str:
+        return f"{name}.json"
+
     @staticmethod
     def register_data_store_class(data_store_classes: dict[str, Any]) -> None:
         data_store_classes["JSONFileDataStore"] = JSONFileDataStore
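The database-backed lookup above replaces the old file-based upsearch for `JSONDataStore`: candidate locations come from `UpsearchService`, and ordering by location descending picks the deepest registered store. A small illustration with assumed rows:

```python
from spiffworkflow_backend.services.upsearch_service import UpsearchService

# Assumed data: json_data_store holds identifier "employees" at locations
# "misc" and "misc/jonjon". A task in a process model at "misc/jonjon/sub"
# produces these candidate locations:
candidates = UpsearchService.upsearch_locations("misc/jonjon/sub")
assert candidates == ["misc/jonjon/sub", "misc/jonjon", "misc"]

# The query keeps rows whose location is in `candidates` and orders by
# location DESC; since each candidate is a prefix of the next-deeper one,
# descending string order ranks "misc/jonjon" above "misc", so the closest
# enclosing data store wins.
```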
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/json_data_store.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/json_data_store.py
index 06c719dac..4abd6b776 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/models/json_data_store.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/json_data_store.py
@@ -1,5 +1,7 @@
 from dataclasses import dataclass
 
+from sqlalchemy import UniqueConstraint
+
 from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
 from spiffworkflow_backend.models.db import db
 
@@ -7,10 +9,14 @@ from spiffworkflow_backend.models.db import db
 @dataclass
 class JSONDataStoreModel(SpiffworkflowBaseDBModel):
     __tablename__ = "json_data_store"
+    __table_args__ = (UniqueConstraint("identifier", "location", name="_identifier_location_unique"),)
 
     id: int = db.Column(db.Integer, primary_key=True)
-    name: str = db.Column(db.String(255), index=True)
-    location: str = db.Column(db.String(255))
-    data: dict = db.Column(db.JSON)
-    updated_at_in_seconds: int = db.Column(db.Integer)
-    created_at_in_seconds: int = db.Column(db.Integer)
+    name: str = db.Column(db.String(255), index=True, nullable=False)
+    identifier: str = db.Column(db.String(255), index=True, nullable=False)
+    location: str = db.Column(db.String(255), nullable=False)
+    schema: dict = db.Column(db.JSON, nullable=False)
+    data: dict = db.Column(db.JSON, nullable=False)
+    description: str = db.Column(db.String(255))
+    updated_at_in_seconds: int = db.Column(db.Integer, nullable=False)
+    created_at_in_seconds: int = db.Column(db.Integer, nullable=False)
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/data_store_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/data_store_controller.py
index 1e652c612..d38ddbfae 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/data_store_controller.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/data_store_controller.py
@@ -1,5 +1,5 @@
 """APIs for dealing with process groups, process models, and process instances."""
-
+import json
 from typing import Any
 
 import flask.wrappers
@@ -11,6 +11,12 @@ from spiffworkflow_backend.data_stores.kkv import KKVDataStore
 from spiffworkflow_backend.data_stores.typeahead import TypeaheadDataStore
 from spiffworkflow_backend.exceptions.api_error import ApiError
 
+DATA_STORES = {
+    "json": (JSONDataStore, "JSON Data Store"),
+    "kkv": (KKVDataStore, "Keyed Key-Value Data Store"),
+    "typeahead": (TypeaheadDataStore, "Typeahead Data Store"),
+}
+
 
 def data_store_list() -> flask.wrappers.Response:
     """Returns a list of the names of all the data stores."""
@@ -25,6 +31,16 @@ def data_store_list() -> flask.wrappers.Response:
     return make_response(jsonify(data_stores), 200)
 
 
+def data_store_types() -> flask.wrappers.Response:
+    """Returns a list of the types of available data stores."""
+
+    # this if == "json" check is temporary while we roll out support for other data stores
+    # being created with locations, identifiers and schemas
+    data_store_types = [{"type": k, "name": v[0].__name__, "description": v[1]} for k, v in DATA_STORES.items() if k == "json"]
+
+    return make_response(jsonify(data_store_types), 200)
+
+
 def _build_response(data_store_class: Any, name: str, page: int, per_page: int) -> flask.wrappers.Response:
     data_store_query = data_store_class.query_data_store(name)
     data = data_store_query.paginate(page=page, per_page=per_page, error_out=False)
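With only `json` rolled out, `data_store_types` above returns the class name as `name` and the human-readable label as `description`, derived directly from `DATA_STORES`:

```python
# GET /v1.0/data-stores/types (path prefix assumed) currently yields:
[{"type": "json", "name": "JSONDataStore", "description": "JSON Data Store"}]
```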
@@ -46,13 +62,41 @@ def _build_response(data_store_class: Any, name: str, page: int, per_page: int)
 
 def data_store_item_list(data_store_type: str, name: str, page: int = 1, per_page: int = 100) -> flask.wrappers.Response:
     """Returns a list of the items in a data store."""
-    if data_store_type == "typeahead":
-        return _build_response(TypeaheadDataStore, name, page, per_page)
+    if data_store_type not in DATA_STORES:
+        raise ApiError("unknown_data_store", f"Unknown data store type: {data_store_type}", status_code=400)
 
-    if data_store_type == "kkv":
-        return _build_response(KKVDataStore, name, page, per_page)
+    data_store_class, _ = DATA_STORES[data_store_type]
+    return _build_response(data_store_class, name, page, per_page)
 
-    if data_store_type == "json":
-        return _build_response(JSONDataStore, name, page, per_page)
 
-    raise ApiError("unknown_data_store", f"Unknown data store type: {data_store_type}", status_code=400)
+def data_store_create(body: dict) -> flask.wrappers.Response:
+    try:
+        data_store_type = body["type"]
+        name = body["name"]
+        identifier = body["id"]
+        location = body["location"]
+        description = body.get("description")
+        schema = body["schema"]
+    except Exception as e:
+        raise ApiError(
+            "data_store_required_key_missing",
+            "The type, name, id, location and schema are required when creating a new data store instance.",
+            status_code=400,
+        ) from e
+
+    try:
+        schema = json.loads(schema)
+    except Exception as e:
+        raise ApiError(
+            "data_store_invalid_schema",
+            "A valid JSON Schema is required when creating a new data store instance.",
+            status_code=400,
+        ) from e
+
+    if data_store_type not in DATA_STORES:
+        raise ApiError("unknown_data_store", f"Unknown data store type: {data_store_type}", status_code=400)
+
+    data_store_class, _ = DATA_STORES[data_store_type]
+    data_store_class.create_instance(name, identifier, location, schema, description)
+
+    return make_response(jsonify({"ok": True}), 200)
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/reference_cache_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/reference_cache_service.py
index 936f7eb4b..839d4b246 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/services/reference_cache_service.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/reference_cache_service.py
@@ -1,10 +1,9 @@
-import os
-
 from sqlalchemy import insert
 
 from spiffworkflow_backend.models.cache_generation import CacheGenerationModel
 from spiffworkflow_backend.models.db import db
 from spiffworkflow_backend.models.reference_cache import ReferenceCacheModel
+from spiffworkflow_backend.services.upsearch_service import UpsearchService
 
 
 class ReferenceCacheService:
@@ -37,7 +36,7 @@ class ReferenceCacheService:
         cache_generation = CacheGenerationModel.newest_generation_for_table("reference_cache")
         if cache_generation is None:
             return None
-        locations = cls.upsearch_locations(location)
+        locations = UpsearchService.upsearch_locations(location)
         references = (
             ReferenceCacheModel.query.filter_by(
                 identifier=identifier,
@@ -54,13 +53,3 @@ class ReferenceCacheService:
             return reference.relative_location  # type: ignore
 
         return None
-
-    @classmethod
-    def upsearch_locations(cls, location: str) -> list[str]:
-        locations = []
-
-        while location != "":
-            locations.append(location)
-            location = os.path.dirname(location)
-
-        return locations
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/upsearch_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/upsearch_service.py
new file mode 100644
index 000000000..b1732e67b
--- /dev/null
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/upsearch_service.py
@@ -0,0 +1,14 @@
+import os
+
+
+class UpsearchService:
+    @classmethod
+    def upsearch_locations(cls, process_model_identifier: str) -> list[str]:
+        location = process_model_identifier
+        locations = []
+
+        while location != "":
+            locations.append(location)
+            location = os.path.dirname(location)
+
+        return locations
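The extracted service behaves exactly as the old `ReferenceCacheService.upsearch_locations` did. Two edge cases worth keeping in mind, both following directly from the loop condition:

```python
from spiffworkflow_backend.services.upsearch_service import UpsearchService

# A top-level location yields just itself; the repository root ("") is never
# included because the loop stops once os.path.dirname() returns "".
assert UpsearchService.upsearch_locations("misc") == ["misc"]
assert UpsearchService.upsearch_locations("") == []
```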
diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_reference_cache_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_reference_cache_service.py
index ceb2a0681..13534fd87 100644
--- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_reference_cache_service.py
+++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_reference_cache_service.py
@@ -41,17 +41,6 @@ def with_loaded_reference_cache(app: Flask, with_db_and_bpmn_file_cleanup: None)
 
 
 class TestReferenceCacheService(BaseTest):
-    def test_upsearch_locations(
-        self,
-    ) -> None:
-        locations = ReferenceCacheService.upsearch_locations("misc/jonjon/generic-data-store-area/test-level-2")
-        assert locations == [
-            "misc/jonjon/generic-data-store-area/test-level-2",
-            "misc/jonjon/generic-data-store-area",
-            "misc/jonjon",
-            "misc",
-        ]
-
     def test_can_find_data_store_in_current_location(self, with_loaded_reference_cache: None) -> None:
         location = ReferenceCacheService.upsearch(
             "misc/jonjon/generic-data-store-area/test-level-1", "contacts_datastore", "data_store"
diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_upsearch_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_upsearch_service.py
new file mode 100644
index 000000000..ede25b54b
--- /dev/null
+++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_upsearch_service.py
@@ -0,0 +1,16 @@
+from spiffworkflow_backend.services.upsearch_service import UpsearchService
+
+from tests.spiffworkflow_backend.helpers.base_test import BaseTest
+
+
+class TestUpsearchService(BaseTest):
+    def test_upsearch_locations(
+        self,
+    ) -> None:
+        locations = UpsearchService.upsearch_locations("misc/jonjon/generic-data-store-area/test-level-2")
+        assert locations == [
+            "misc/jonjon/generic-data-store-area/test-level-2",
+            "misc/jonjon/generic-data-store-area",
+            "misc/jonjon",
+            "misc",
+        ]
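The frontend component below validates identifiers with the regex `/^[a-z][0-9a-z_]*[a-z0-9]$/` (its `hasValidIdentifier` helper); rendered in Python for reference, with a few worked cases:

```python
import re

# Same pattern the DataStoreForm component applies to new data store ids.
VALID_ID = re.compile(r"^[a-z][0-9a-z_]*[a-z0-9]$")

assert VALID_ID.match("employees")
assert VALID_ID.match("employee_roster2")
assert not VALID_ID.match("Employees")  # must be lowercase
assert not VALID_ID.match("x")          # needs at least two characters
assert not VALID_ID.match("emp_")       # cannot end with an underscore
```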
diff --git a/spiffworkflow-frontend/src/components/DataStoreForm.tsx b/spiffworkflow-frontend/src/components/DataStoreForm.tsx
new file mode 100644
index 000000000..a2e17cade
--- /dev/null
+++ b/spiffworkflow-frontend/src/components/DataStoreForm.tsx
@@ -0,0 +1,263 @@
+import { useEffect, useState } from 'react';
+import { useNavigate } from 'react-router-dom';
+// @ts-ignore
+import {
+  Button,
+  ComboBox,
+  Form,
+  Stack,
+  TextInput,
+  TextArea,
+} from '@carbon/react';
+import HttpService from '../services/HttpService';
+import { DataStore, DataStoreType } from '../interfaces';
+import {
+  modifyProcessIdentifierForPathParam,
+  truncateString,
+} from '../helpers';
+
+type OwnProps = {
+  mode: string;
+  dataStore: DataStore;
+  setDataStore: (..._args: any[]) => any;
+};
+
+export default function DataStoreForm({
+  mode,
+  dataStore,
+  setDataStore,
+}: OwnProps) {
+  const [identifierInvalid, setIdentifierInvalid] = useState<boolean>(false);
+  const [idHasBeenUpdatedByUser, setIdHasBeenUpdatedByUser] =
+    useState<boolean>(false);
+  const [nameInvalid, setNameInvalid] = useState<boolean>(false);
+  const [typeInvalid, setTypeInvalid] = useState<boolean>(false);
+  const [schemaInvalid, setSchemaInvalid] = useState<boolean>(false);
+  const [dataStoreTypes, setDataStoreTypes] = useState<[DataStoreType] | []>(
+    []
+  );
+  const [selectedDataStoreType, setSelectedDataStoreType] =
+    useState<DataStoreType | null>(null);
+  const navigate = useNavigate();
+
+  const dataStoreLocation = () => {
+    const searchParams = new URLSearchParams(document.location.search);
+    const parentGroupId = searchParams.get('parentGroupId');
+
+    return parentGroupId ?? '/';
+  };
+
+  useEffect(() => {
+    const handleSetDataStoreTypesCallback = (result: any) => {
+      setDataStoreTypes(result);
+    };
+
+    HttpService.makeCallToBackend({
+      path: '/data-stores/types',
+      successCallback: handleSetDataStoreTypesCallback,
+      httpMethod: 'GET',
+    });
+  }, [setDataStoreTypes]);
+
+  const navigateToDataStores = (_result: any) => {
+    const location = dataStoreLocation();
+    if (location !== '/') {
+      navigate(
+        `/process-groups/${modifyProcessIdentifierForPathParam(location)}`
+      );
+    } else {
+      navigate(`/process-groups`);
+    }
+  };
+
+  const hasValidIdentifier = (identifierToCheck: string) => {
+    return identifierToCheck.match(/^[a-z][0-9a-z_]*[a-z0-9]$/);
+  };
+
+  const handleFormSubmission = (event: any) => {
+    const searchParams = new URLSearchParams(document.location.search);
+    const parentGroupId = searchParams.get('parentGroupId');
+
+    event.preventDefault();
+    let hasErrors = false;
+    if (mode === 'new' && !hasValidIdentifier(dataStore.id)) {
+      setIdentifierInvalid(true);
+      hasErrors = true;
+    }
+    if (dataStore.name === '') {
+      setNameInvalid(true);
+      hasErrors = true;
+    }
+    if (selectedDataStoreType === null) {
+      setTypeInvalid(true);
+      hasErrors = true;
+    }
+    if (dataStore.schema === '') {
+      setSchemaInvalid(true);
+      hasErrors = true;
+    }
+    if (hasErrors) {
+      return;
+    }
+    let path = '/data-stores';
+    let httpMethod = 'POST';
+    if (mode === 'edit') {
+      path = `/data-stores/${dataStore.id}`;
+      httpMethod = 'PUT';
+    }
+    const postBody = {
+      id: dataStore.id,
+      name: dataStore.name,
+      description: dataStore.description,
+      type: dataStore.type,
+      schema: dataStore.schema,
+      location: parentGroupId ?? '/',
+    };
+
+    HttpService.makeCallToBackend({
+      path,
+      successCallback: navigateToDataStores,
+      httpMethod,
+      postBody,
+    });
+  };
+
+  const updateDataStore = (newValues: any) => {
+    const dataStoreToCopy = {
+      ...dataStore,
+    };
+    Object.assign(dataStoreToCopy, newValues);
+    setDataStore(dataStoreToCopy);
+  };
+
+  const makeIdentifier = (str: any) => {
+    return str
+      .toLowerCase()
+      .trim()
+      .replace(/[^\w\s-]/g, '')
+      .replace(/[\s-]+/g, '_')
+      .replace(/^[-\d]+/g, '')
+      .replace(/-+$/g, '');
+  };
+
+  const onNameChanged = (newName: any) => {
+    setNameInvalid(false);
+    const updateDict = { name: newName };
+    if (!idHasBeenUpdatedByUser && mode === 'new') {
+      Object.assign(updateDict, { id: makeIdentifier(newName) });
+    }
+    updateDataStore(updateDict);
+  };
+
+  const onTypeChanged = (newType: any) => {
+    setTypeInvalid(false);
+    const newTypeSelection = newType.selectedItem;
+    const updateDict = { type: newTypeSelection.type };
+    updateDataStore(updateDict);
+    setSelectedDataStoreType(newTypeSelection);
+  };
+
+  const onSchemaChanged = (newSchema: any) => {
+    setSchemaInvalid(false);
+    const updateDict = { schema: newSchema };
+    updateDataStore(updateDict);
+  };
+
+  const formElements = () => {
+    const textInputs = [
+      <TextInput
+        id="data-store-name"
+        name="name"
+        invalidText="Name is required."
+        invalid={nameInvalid}
+        labelText="Name*"
+        value={dataStore.name}
+        onChange={(event: any) => onNameChanged(event.target.value)}
+      />,
+    ];
+
+    if (mode === 'new') {
+      textInputs.push(
+        <TextInput
+          id="data-store-identifier"
+          name="id"
+          invalidText="Identifier is required."
+          invalid={identifierInvalid}
+          labelText="Identifier*"
+          value={dataStore.id}
+          onChange={(event: any) => {
+            updateDataStore({ id: event.target.value });
+            // was invalid, and now valid
+            if (identifierInvalid && hasValidIdentifier(event.target.value)) {
+              setIdentifierInvalid(false);
+            }
+            setIdHasBeenUpdatedByUser(true);
+          }}
+        />
+      );
+    }
+
+    textInputs.push(
+      <ComboBox
+        onChange={onTypeChanged}
+        id="data-store-type-select"
+        items={dataStoreTypes}
+        itemToString={(dataStoreType: any) => {
+          if (dataStoreType) {
+            return `${dataStoreType.name} (${truncateString(
+              dataStoreType.description,
+              75
+            )})`;
+          }
+          return null;
+        }}
+        titleText="Type*"
+        invalidText="Type is required."
+        invalid={typeInvalid}
+        placeholder="Choose the data store type"
+        selectedItem={selectedDataStoreType}
+      />
+    );
+
+    textInputs.push(