Add data store at the process group level (#859)

jbirddog authored 2024-01-10 09:48:31 -05:00, committed by GitHub
parent ab39569cac
commit a8a32b60fa
19 changed files with 783 additions and 102 deletions

View File

@@ -0,0 +1,45 @@
"""empty message
Revision ID: 12a8864399d4
Revises: bc2b84d013e0
Create Date: 2023-12-19 08:07:12.265442
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '12a8864399d4'
down_revision = 'bc2b84d013e0'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('json_data_store', schema=None) as batch_op:
batch_op.drop_index('ix_json_data_store_name')
op.drop_table('json_data_store')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('json_data_store',
sa.Column('id', mysql.INTEGER(), autoincrement=True, nullable=False),
sa.Column('name', mysql.VARCHAR(length=255), nullable=True),
sa.Column('location', mysql.VARCHAR(length=255), nullable=True),
sa.Column('data', mysql.JSON(), nullable=True),
sa.Column('updated_at_in_seconds', mysql.INTEGER(), autoincrement=False, nullable=True),
sa.Column('created_at_in_seconds', mysql.INTEGER(), autoincrement=False, nullable=True),
sa.PrimaryKeyConstraint('id'),
mysql_collate='utf8mb4_0900_ai_ci',
mysql_default_charset='utf8mb4',
mysql_engine='InnoDB'
)
with op.batch_alter_table('json_data_store', schema=None) as batch_op:
batch_op.create_index('ix_json_data_store_name', ['name'], unique=False)
# ### end Alembic commands ###

View File

@@ -0,0 +1,48 @@
"""empty message
Revision ID: a872f8f2e909
Revises: 12a8864399d4
Create Date: 2023-12-19 08:40:26.572613
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'a872f8f2e909'
down_revision = '12a8864399d4'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('json_data_store',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('identifier', sa.String(length=255), nullable=False),
sa.Column('location', sa.String(length=255), nullable=False),
sa.Column('schema', sa.JSON(), nullable=False),
sa.Column('data', sa.JSON(), nullable=False),
sa.Column('description', sa.String(length=255), nullable=True),
sa.Column('updated_at_in_seconds', sa.Integer(), nullable=False),
sa.Column('created_at_in_seconds', sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('identifier', 'location', name='_identifier_location_unique')
)
with op.batch_alter_table('json_data_store', schema=None) as batch_op:
batch_op.create_index(batch_op.f('ix_json_data_store_identifier'), ['identifier'], unique=False)
batch_op.create_index(batch_op.f('ix_json_data_store_name'), ['name'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('json_data_store', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('ix_json_data_store_name'))
batch_op.drop_index(batch_op.f('ix_json_data_store_identifier'))
op.drop_table('json_data_store')
# ### end Alembic commands ###

View File

@@ -2770,6 +2770,28 @@ paths:
responses:
"200":
description: The list of currently defined data store objects
post:
operationId: spiffworkflow_backend.routes.data_store_controller.data_store_create
summary: Create a new data store instance.
requestBody:
content:
application/json:
schema:
$ref: "#/components/schemas/DataStore"
tags:
- Data Stores
responses:
"200":
description: The newly created data store instance
/data-stores/types:
get:
operationId: spiffworkflow_backend.routes.data_store_controller.data_store_types
summary: Return a list of the data store types.
tags:
- Data Stores
responses:
"200":
description: The list of currently defined data store types
/data-stores/{data_store_type}/{name}:
parameters:
- name: data_store_type
@@ -2998,31 +3020,22 @@ components:
DataStore:
properties:
id:
type: integer
example: 1234
key:
type: string
example: MyKey
workflow_id:
type: integer
x-nullable: true
example: 12
user_id:
example: employees
name:
type: string
example: Employees DataStore
type:
type: string
example: TypeaheadDataStore
description:
type: string
x-nullable: true
example: dhf8r
task_id:
example: This data store contains all the employees
parent_group_id:
type: string
x-nullable: true
example: MyTask
process_model_id:
type: string
x-nullable: true
example: My Spec Name
value:
type: string
x-nullable: true
example: Some Value
example: Optional parent group id to specify the location of this data store
Process:
properties:
identifier:
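
For reference, the response of the new GET /data-stores/types endpoint documented above is built from the DATA_STORES registry in data_store_controller.py (shown further down). A minimal sketch of what it returns while only the "json" type is rolled out, inferred from that controller rather than taken from an extra file in this commit:

# Shape produced by data_store_types(): one entry per exposed data store type.
expected_types_response = [
    {"type": "json", "name": "JSONDataStore", "description": "JSON Data Store"},
]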

View File

@@ -2,6 +2,10 @@ from typing import Any
class DataStoreCRUD:
@staticmethod
def create_instance(name: str, identifier: str, location: str, schema: dict[str, Any], description: str | None) -> None:
raise Exception("must implement")
@staticmethod
def existing_data_stores() -> list[dict[str, Any]]:
raise Exception("must implement")
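
To illustrate the contract these two static hooks define, here is a deliberately hypothetical implementer, an in-memory class invented for this example and not part of the commit; the return shape of existing_data_stores is assumed:

from typing import Any


class InMemoryDataStore(DataStoreCRUD):
    """Hypothetical example: keeps instances in a class-level dict instead of the database."""

    _instances: dict[str, dict[str, Any]] = {}

    @staticmethod
    def create_instance(name: str, identifier: str, location: str, schema: dict[str, Any], description: str | None) -> None:
        # Key by location + identifier, mirroring the unique constraint used by the JSON data store model.
        InMemoryDataStore._instances[f"{location}:{identifier}"] = {
            "name": name,
            "identifier": identifier,
            "location": location,
            "schema": schema,
            "description": description or "",
            "data": {},
        }

    @staticmethod
    def existing_data_stores() -> list[dict[str, Any]]:
        # Assumed response shape; the real implementations live in the json/kkv/typeahead modules.
        return [{"name": v["name"], "type": "in_memory"} for v in InMemoryDataStore._instances.values()]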

View File

@@ -10,6 +10,15 @@ from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.json_data_store import JSONDataStoreModel
from spiffworkflow_backend.services.file_system_service import FileSystemService
from spiffworkflow_backend.services.reference_cache_service import ReferenceCacheService
from spiffworkflow_backend.services.upsearch_service import UpsearchService
class DataStoreReadError(Exception):
pass
class DataStoreWriteError(Exception):
pass
def _process_model_location_for_task(spiff_task: SpiffTask) -> str | None:
@@ -19,29 +28,22 @@ def _process_model_location_for_task(spiff_task: SpiffTask) -> str | None:
return None
def _data_store_filename(name: str) -> str:
return f"{name}.json"
def _data_store_exists_at_location(location: str, name: str) -> bool:
return FileSystemService.file_exists_at_relative_path(location, _data_store_filename(name))
def _data_store_location_for_task(spiff_task: SpiffTask, name: str) -> str | None:
location = _process_model_location_for_task(spiff_task)
if location is None:
return None
if _data_store_exists_at_location(location, name):
return location
location = ReferenceCacheService.upsearch(location, name, "data_store")
if location is None or not _data_store_exists_at_location(location, name):
return None
return location
class JSONDataStore(BpmnDataStoreSpecification, DataStoreCRUD): # type: ignore
"""JSONDataStore."""
@staticmethod
def create_instance(name: str, identifier: str, location: str, schema: dict[str, Any], description: str | None) -> None:
model = JSONDataStoreModel(
name=name,
identifier=identifier,
location=location,
schema=schema,
description=description or "",
data={},
)
db.session.add(model)
db.session.commit()
@staticmethod
def existing_data_stores() -> list[dict[str, Any]]:
data_stores = []
@@ -58,35 +60,55 @@ class JSONDataStore(BpmnDataStoreSpecification, DataStoreCRUD): # type: ignore
@staticmethod
def build_response_item(model: Any) -> dict[str, Any]:
return {"location": model.location, "data": model.data}
return {"location": model.location, "identifier": model.identifier, "data": model.data}
def get(self, my_task: SpiffTask) -> None:
"""get."""
model: JSONDataStoreModel | None = None
location = _data_store_location_for_task(my_task, self.bpmn_id)
location = self._data_store_location_for_task(my_task, self.bpmn_id)
if location is not None:
model = db.session.query(JSONDataStoreModel).filter_by(name=self.bpmn_id, location=location).first()
model = db.session.query(JSONDataStoreModel).filter_by(identifier=self.bpmn_id, location=location).first()
if model is None:
raise Exception(f"Unable to read from data store '{self.bpmn_id}' using location '{location}'.")
raise DataStoreReadError(f"Unable to read from data store '{self.bpmn_id}' using location '{location}'.")
my_task.data[self.bpmn_id] = model.data
def set(self, my_task: SpiffTask) -> None:
"""set."""
location = _data_store_location_for_task(my_task, self.bpmn_id)
if location is None:
raise Exception(f"Unable to write to data store '{self.bpmn_id}' using location '{location}'.")
data = my_task.data[self.bpmn_id]
model = JSONDataStoreModel(
name=self.bpmn_id,
location=location,
data=data,
)
model: JSONDataStoreModel | None = None
location = self._data_store_location_for_task(my_task, self.bpmn_id)
if location is not None:
model = JSONDataStoreModel.query.filter_by(identifier=self.bpmn_id, location=location).first()
if location is None or model is None:
raise DataStoreWriteError(f"Unable to write to data store '{self.bpmn_id}' using location '{location}'.")
data = my_task.data[self.bpmn_id]
# TODO: validate data against schema
model.data = data
db.session.query(JSONDataStoreModel).filter_by(name=self.bpmn_id, location=location).delete()
db.session.add(model)
db.session.commit()
del my_task.data[self.bpmn_id]
def _data_store_location_for_task(self, spiff_task: SpiffTask, identifier: str) -> str | None:
location = _process_model_location_for_task(spiff_task)
if location is None:
return None
locations = UpsearchService.upsearch_locations(location)
model = (
JSONDataStoreModel.query.filter_by(identifier=identifier)
.filter(JSONDataStoreModel.location.in_(locations)) # type: ignore
.order_by(JSONDataStoreModel.location.desc()) # type: ignore
.first()
)
if model is None:
return None
return model.location # type: ignore
@staticmethod
def register_data_store_class(data_store_classes: dict[str, Any]) -> None:
data_store_classes["JSONDataStore"] = JSONDataStore
@@ -114,21 +136,40 @@ class JSONFileDataStore(BpmnDataStoreSpecification): # type: ignore
def get(self, my_task: SpiffTask) -> None:
"""get."""
location = _data_store_location_for_task(my_task, self.bpmn_id)
location = self._data_store_location_for_task(my_task, self.bpmn_id)
if location is None:
raise Exception(f"Unable to read from data store '{self.bpmn_id}' using location '{location}'.")
contents = FileSystemService.contents_of_json_file_at_relative_path(location, _data_store_filename(self.bpmn_id))
raise DataStoreReadError(f"Unable to read from data store '{self.bpmn_id}' using location '{location}'.")
contents = FileSystemService.contents_of_json_file_at_relative_path(location, self._data_store_filename(self.bpmn_id))
my_task.data[self.bpmn_id] = contents
def set(self, my_task: SpiffTask) -> None:
"""set."""
location = _data_store_location_for_task(my_task, self.bpmn_id)
location = self._data_store_location_for_task(my_task, self.bpmn_id)
if location is None:
raise Exception(f"Unable to write to data store '{self.bpmn_id}' using location '{location}'.")
raise DataStoreWriteError(f"Unable to write to data store '{self.bpmn_id}' using location '{location}'.")
data = my_task.data[self.bpmn_id]
FileSystemService.write_to_json_file_at_relative_path(location, _data_store_filename(self.bpmn_id), data)
FileSystemService.write_to_json_file_at_relative_path(location, self._data_store_filename(self.bpmn_id), data)
del my_task.data[self.bpmn_id]
def _data_store_location_for_task(self, spiff_task: SpiffTask, identifier: str) -> str | None:
location = _process_model_location_for_task(spiff_task)
if location is None:
return None
if self._data_store_exists_at_location(location, identifier):
return location
location = ReferenceCacheService.upsearch(location, identifier, "data_store")
if location is None:
return None
if not self._data_store_exists_at_location(location, identifier):
return None
return location
def _data_store_exists_at_location(self, location: str, identifier: str) -> bool:
return FileSystemService.file_exists_at_relative_path(location, self._data_store_filename(identifier))
def _data_store_filename(self, name: str) -> str:
return f"{name}.json"
@staticmethod
def register_data_store_class(data_store_classes: dict[str, Any]) -> None:
data_store_classes["JSONFileDataStore"] = JSONFileDataStore
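
The net effect of the new _data_store_location_for_task logic in JSONDataStore: the task's process model location is expanded into its ancestor chain via UpsearchService, and the closest ancestor that actually holds a matching json_data_store row wins. A small illustration with made-up locations and identifier:

# Ancestor chain for a task running under this process model location:
locations = UpsearchService.upsearch_locations("misc/jonjon/generic-data-store-area")
# -> ["misc/jonjon/generic-data-store-area", "misc/jonjon", "misc"]

# If json_data_store rows exist for identifier "employees" at both "misc" and
# "misc/jonjon", the query filters location to the list above and orders by
# location descending; ancestors are string prefixes of one another, so the
# deepest matching location ("misc/jonjon") sorts highest and .first() picks it.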

View File

@@ -1,5 +1,7 @@
from dataclasses import dataclass
from sqlalchemy import UniqueConstraint
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.db import db
@@ -7,10 +9,14 @@ from spiffworkflow_backend.models.db import db
@dataclass
class JSONDataStoreModel(SpiffworkflowBaseDBModel):
__tablename__ = "json_data_store"
__table_args__ = (UniqueConstraint("identifier", "location", name="_identifier_location_unique"),)
id: int = db.Column(db.Integer, primary_key=True)
name: str = db.Column(db.String(255), index=True)
location: str = db.Column(db.String(255))
data: dict = db.Column(db.JSON)
updated_at_in_seconds: int = db.Column(db.Integer)
created_at_in_seconds: int = db.Column(db.Integer)
name: str = db.Column(db.String(255), index=True, nullable=False)
identifier: str = db.Column(db.String(255), index=True, nullable=False)
location: str = db.Column(db.String(255), nullable=False)
schema: dict = db.Column(db.JSON, nullable=False)
data: dict = db.Column(db.JSON, nullable=False)
description: str = db.Column(db.String(255))
updated_at_in_seconds: int = db.Column(db.Integer, nullable=False)
created_at_in_seconds: int = db.Column(db.Integer, nullable=False)

View File

@@ -1,5 +1,5 @@
"""APIs for dealing with process groups, process models, and process instances."""
import json
from typing import Any
import flask.wrappers
@@ -11,6 +11,12 @@ from spiffworkflow_backend.data_stores.kkv import KKVDataStore
from spiffworkflow_backend.data_stores.typeahead import TypeaheadDataStore
from spiffworkflow_backend.exceptions.api_error import ApiError
DATA_STORES = {
"json": (JSONDataStore, "JSON Data Store"),
"kkv": (KKVDataStore, "Keyed Key-Value Data Store"),
"typeahead": (TypeaheadDataStore, "Typeahead Data Store"),
}
def data_store_list() -> flask.wrappers.Response:
"""Returns a list of the names of all the data stores."""
@@ -25,6 +31,16 @@ def data_store_list() -> flask.wrappers.Response:
return make_response(jsonify(data_stores), 200)
def data_store_types() -> flask.wrappers.Response:
"""Returns a list of the types of available data stores."""
# this if == "json" check is temporary while we roll out support for other data stores
# being created with locations, identifiers and schemas
data_store_types = [{"type": k, "name": v[0].__name__, "description": v[1]} for k, v in DATA_STORES.items() if k == "json"]
return make_response(jsonify(data_store_types), 200)
def _build_response(data_store_class: Any, name: str, page: int, per_page: int) -> flask.wrappers.Response:
data_store_query = data_store_class.query_data_store(name)
data = data_store_query.paginate(page=page, per_page=per_page, error_out=False)
@@ -46,13 +62,41 @@ def _build_response(data_store_class: Any, name: str, page: int, per_page: int)
def data_store_item_list(data_store_type: str, name: str, page: int = 1, per_page: int = 100) -> flask.wrappers.Response:
"""Returns a list of the items in a data store."""
if data_store_type == "typeahead":
return _build_response(TypeaheadDataStore, name, page, per_page)
if data_store_type == "kkv":
return _build_response(KKVDataStore, name, page, per_page)
if data_store_type == "json":
return _build_response(JSONDataStore, name, page, per_page)
if data_store_type not in DATA_STORES:
raise ApiError("unknown_data_store", f"Unknown data store type: {data_store_type}", status_code=400)
data_store_class, _ = DATA_STORES[data_store_type]
return _build_response(data_store_class, name, page, per_page)
def data_store_create(body: dict) -> flask.wrappers.Response:
try:
data_store_type = body["type"]
name = body["name"]
identifier = body["id"]
location = body["location"]
description = body.get("description")
schema = body["schema"]
except Exception as e:
raise ApiError(
"data_store_required_key_missing",
"A valid JSON Schema is required when creating a new data store instance.",
status_code=400,
) from e
try:
schema = json.loads(schema)
except Exception as e:
raise ApiError(
"data_store_invalid_schema",
"A valid JSON Schema is required when creating a new data store instance.",
status_code=400,
) from e
if data_store_type not in DATA_STORES:
raise ApiError("unknown_data_store", f"Unknown data store type: {data_store_type}", status_code=400)
data_store_class, _ = DATA_STORES[data_store_type]
data_store_class.create_instance(name, identifier, location, schema, description)
return make_response(jsonify({"ok": True}), 200)
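
A sketch of exercising the new create endpoint end to end. The base URL and Authorization header are assumptions about the deployment, not part of this commit; the payload keys match what data_store_create reads above, with schema passed as a JSON string and location holding the parent process group id ("/" for the top level):

import json

import requests

payload = {
    "type": "json",                      # key into DATA_STORES
    "name": "Employees DataStore",
    "id": "employees",                   # machine identifier
    "location": "misc/jonjon",           # parent process group; "/" for top level
    "description": "This data store contains all the employees",
    "schema": json.dumps({"type": "object"}),  # sent as a string, parsed server-side
}

response = requests.post(
    "http://localhost:7000/v1.0/data-stores",            # assumed host and API prefix
    headers={"Authorization": "Bearer <access token>"},   # assumed auth scheme
    json=payload,
)
assert response.json() == {"ok": True}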

View File

@@ -1,10 +1,9 @@
import os
from sqlalchemy import insert
from spiffworkflow_backend.models.cache_generation import CacheGenerationModel
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.reference_cache import ReferenceCacheModel
from spiffworkflow_backend.services.upsearch_service import UpsearchService
class ReferenceCacheService:
@@ -37,7 +36,7 @@ class ReferenceCacheService:
cache_generation = CacheGenerationModel.newest_generation_for_table("reference_cache")
if cache_generation is None:
return None
locations = cls.upsearch_locations(location)
locations = UpsearchService.upsearch_locations(location)
references = (
ReferenceCacheModel.query.filter_by(
identifier=identifier,
@@ -54,13 +53,3 @@ class ReferenceCacheService:
return reference.relative_location # type: ignore
return None
@classmethod
def upsearch_locations(cls, location: str) -> list[str]:
locations = []
while location != "":
locations.append(location)
location = os.path.dirname(location)
return locations

View File

@@ -0,0 +1,14 @@
import os
class UpsearchService:
@classmethod
def upsearch_locations(cls, process_model_identifier: str) -> list[str]:
location = process_model_identifier
locations = []
while location != "":
locations.append(location)
location = os.path.dirname(location)
return locations

View File

@@ -41,17 +41,6 @@ def with_loaded_reference_cache(app: Flask, with_db_and_bpmn_file_cleanup: None)
class TestReferenceCacheService(BaseTest):
def test_upsearch_locations(
self,
) -> None:
locations = ReferenceCacheService.upsearch_locations("misc/jonjon/generic-data-store-area/test-level-2")
assert locations == [
"misc/jonjon/generic-data-store-area/test-level-2",
"misc/jonjon/generic-data-store-area",
"misc/jonjon",
"misc",
]
def test_can_find_data_store_in_current_location(self, with_loaded_reference_cache: None) -> None:
location = ReferenceCacheService.upsearch(
"misc/jonjon/generic-data-store-area/test-level-1", "contacts_datastore", "data_store"

View File

@@ -0,0 +1,16 @@
from spiffworkflow_backend.services.upsearch_service import UpsearchService
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
class TestUpsearchService(BaseTest):
def test_upsearch_locations(
self,
) -> None:
locations = UpsearchService.upsearch_locations("misc/jonjon/generic-data-store-area/test-level-2")
assert locations == [
"misc/jonjon/generic-data-store-area/test-level-2",
"misc/jonjon/generic-data-store-area",
"misc/jonjon",
"misc",
]

View File

@@ -0,0 +1,263 @@
import { useEffect, useState } from 'react';
import { useNavigate } from 'react-router-dom';
// @ts-ignore
import {
Button,
ComboBox,
Form,
Stack,
TextInput,
TextArea,
} from '@carbon/react';
import HttpService from '../services/HttpService';
import { DataStore, DataStoreType } from '../interfaces';
import {
modifyProcessIdentifierForPathParam,
truncateString,
} from '../helpers';
type OwnProps = {
mode: string;
dataStore: DataStore;
setDataStore: (..._args: any[]) => any;
};
export default function DataStoreForm({
mode,
dataStore,
setDataStore,
}: OwnProps) {
const [identifierInvalid, setIdentifierInvalid] = useState<boolean>(false);
const [idHasBeenUpdatedByUser, setIdHasBeenUpdatedByUser] =
useState<boolean>(false);
const [nameInvalid, setNameInvalid] = useState<boolean>(false);
const [typeInvalid, setTypeInvalid] = useState<boolean>(false);
const [schemaInvalid, setSchemaInvalid] = useState<boolean>(false);
const [dataStoreTypes, setDataStoreTypes] = useState<[DataStoreType] | []>(
[]
);
const [selectedDataStoreType, setSelectedDataStoreType] =
useState<DataStoreType | null>(null);
const navigate = useNavigate();
const dataStoreLocation = () => {
const searchParams = new URLSearchParams(document.location.search);
const parentGroupId = searchParams.get('parentGroupId');
return parentGroupId ?? '/';
};
useEffect(() => {
const handleSetDataStoreTypesCallback = (result: any) => {
setDataStoreTypes(result);
};
HttpService.makeCallToBackend({
path: '/data-stores/types',
successCallback: handleSetDataStoreTypesCallback,
httpMethod: 'GET',
});
}, [setDataStoreTypes]);
const navigateToDataStores = (_result: any) => {
const location = dataStoreLocation();
if (location !== '/') {
navigate(
`/process-groups/${modifyProcessIdentifierForPathParam(location)}`
);
} else {
navigate(`/process-groups`);
}
};
const hasValidIdentifier = (identifierToCheck: string) => {
return identifierToCheck.match(/^[a-z][0-9a-z_]*[a-z0-9]$/);
};
const handleFormSubmission = (event: any) => {
const searchParams = new URLSearchParams(document.location.search);
const parentGroupId = searchParams.get('parentGroupId');
event.preventDefault();
let hasErrors = false;
if (mode === 'new' && !hasValidIdentifier(dataStore.id)) {
setIdentifierInvalid(true);
hasErrors = true;
}
if (dataStore.name === '') {
setNameInvalid(true);
hasErrors = true;
}
if (selectedDataStoreType === null) {
setTypeInvalid(true);
hasErrors = true;
}
if (dataStore.schema === '') {
setSchemaInvalid(true);
hasErrors = true;
}
if (hasErrors) {
return;
}
let path = '/data-stores';
let httpMethod = 'POST';
if (mode === 'edit') {
path = `/data-stores/${dataStore.id}`;
httpMethod = 'PUT';
}
const postBody = {
id: dataStore.id,
name: dataStore.name,
description: dataStore.description,
type: dataStore.type,
schema: dataStore.schema,
location: parentGroupId ?? '/',
};
HttpService.makeCallToBackend({
path,
successCallback: navigateToDataStores,
httpMethod,
postBody,
});
};
const updateDataStore = (newValues: any) => {
const dataStoreToCopy = {
...dataStore,
};
Object.assign(dataStoreToCopy, newValues);
setDataStore(dataStoreToCopy);
};
const makeIdentifier = (str: any) => {
return str
.toLowerCase()
.trim()
.replace(/[^\w\s-]/g, '')
.replace(/[\s-]+/g, '_')
.replace(/^[-\d]+/g, '')
.replace(/-+$/g, '');
};
const onNameChanged = (newName: any) => {
setNameInvalid(false);
const updateDict = { name: newName };
if (!idHasBeenUpdatedByUser && mode === 'new') {
Object.assign(updateDict, { id: makeIdentifier(newName) });
}
updateDataStore(updateDict);
};
const onTypeChanged = (newType: any) => {
setTypeInvalid(false);
const newTypeSelection = newType.selectedItem;
const updateDict = { type: newTypeSelection.type };
updateDataStore(updateDict);
setSelectedDataStoreType(newTypeSelection);
};
const onSchemaChanged = (newSchema: any) => {
setSchemaInvalid(false);
const updateDict = { schema: newSchema };
updateDataStore(updateDict);
};
const formElements = () => {
const textInputs = [
<TextInput
id="data-store-name"
data-qa="data-store-name-input"
name="name"
invalidText="Name is required."
invalid={nameInvalid}
labelText="Name*"
value={dataStore.name}
onChange={(event: any) => onNameChanged(event.target.value)}
/>,
];
if (mode === 'new') {
textInputs.push(
<TextInput
id="data-store-identifier"
name="id"
invalidText="Identifier is required and must be all lowercase characters and hyphens."
invalid={identifierInvalid}
labelText="Identifier*"
value={dataStore.id}
onChange={(event: any) => {
updateDataStore({ id: event.target.value });
// was invalid, and now valid
if (identifierInvalid && hasValidIdentifier(event.target.value)) {
setIdentifierInvalid(false);
}
setIdHasBeenUpdatedByUser(true);
}}
/>
);
}
textInputs.push(
<ComboBox
onChange={onTypeChanged}
id="data-store-type-select"
data-qa="data-store-type-selection"
items={dataStoreTypes}
itemToString={(dataStoreType: DataStoreType) => {
if (dataStoreType) {
return `${dataStoreType.name} (${truncateString(
dataStoreType.description,
75
)})`;
}
return null;
}}
titleText="Type*"
invalidText="Type is required."
invalid={typeInvalid}
placeholder="Choose the data store type"
selectedItem={selectedDataStoreType}
/>
);
textInputs.push(
<TextArea
id="data-store-schema"
name="schema"
invalidText="Schema is required and must be valid JSON."
invalid={schemaInvalid}
labelText="Schema"
value={dataStore.schema}
onChange={(event: any) => onSchemaChanged(event.target.value)}
/>
);
textInputs.push(
<TextArea
id="data-store-description"
name="description"
labelText="Description"
value={dataStore.description}
onChange={(event: any) =>
updateDataStore({ description: event.target.value })
}
/>
);
return textInputs;
};
const formButtons = () => {
return <Button type="submit">Submit</Button>;
};
return (
<Form onSubmit={handleFormSubmission}>
<Stack gap={5}>
{formElements()}
{formButtons()}
</Stack>
</Form>
);
}

View File

@@ -0,0 +1,138 @@
import { useEffect, useState } from 'react';
import {
Dropdown,
Table,
TableHead,
TableHeader,
TableRow,
} from '@carbon/react';
import { TableBody, TableCell } from '@mui/material';
import { useSearchParams } from 'react-router-dom';
import HttpService from '../services/HttpService';
import { DataStore, DataStoreRecords, PaginationObject } from '../interfaces';
import PaginationForTable from './PaginationForTable';
import { getPageInfoFromSearchParams } from '../helpers';
export default function DataStoreListTable() {
const [dataStores, setDataStores] = useState<DataStore[]>([]);
const [dataStore, setDataStore] = useState<DataStore | null>(null);
const [pagination, setPagination] = useState<PaginationObject | null>(null);
const [results, setResults] = useState<any[]>([]);
const [searchParams, setSearchParams] = useSearchParams();
useEffect(() => {
HttpService.makeCallToBackend({
path: `/data-stores`,
successCallback: (newStores: DataStore[]) => {
setDataStores(newStores);
},
});
}, []); // Do this once so we have a list of data stores to select from.
useEffect(() => {
const { page, perPage } = getPageInfoFromSearchParams(
searchParams,
10,
1,
'datastore'
);
const dataStoreType = searchParams.get('type') || '';
const dataStoreName = searchParams.get('name') || '';
if (dataStoreType === '' || dataStoreName === '') {
return;
}
if (dataStores && dataStoreName && dataStoreType) {
dataStores.forEach((ds) => {
if (ds.name === dataStoreName && ds.type === dataStoreType) {
setDataStore(ds);
}
});
}
const queryParamString = `per_page=${perPage}&page=${page}`;
HttpService.makeCallToBackend({
path: `/data-stores/${dataStoreType}/${dataStoreName}?${queryParamString}`,
successCallback: (response: DataStoreRecords) => {
setResults(response.results);
setPagination(response.pagination);
},
});
}, [dataStores, searchParams]);
const getCell = (value: any) => {
const valueToUse =
typeof value === 'object' ? (
<pre>
<code>{JSON.stringify(value, null, 4)}</code>
</pre>
) : (
value
);
return <TableCell>{valueToUse}</TableCell>;
};
const getTable = () => {
if (results.length === 0) {
return null;
}
const firstResult = results[0];
const tableHeaders: any[] = [];
const keys = Object.keys(firstResult);
keys.forEach((key) => tableHeaders.push(<TableHeader>{key}</TableHeader>));
return (
<Table striped bordered>
<TableHead>
<TableRow>{tableHeaders}</TableRow>
</TableHead>
<TableBody>
{results.map((object) => {
return (
<TableRow>
{keys.map((key) => {
return getCell(object[key]);
})}
</TableRow>
);
})}
</TableBody>
</Table>
);
};
const { page, perPage } = getPageInfoFromSearchParams(
searchParams,
10,
1,
'datastore'
);
return (
<>
<Dropdown
id="data-store-dropdown"
titleText="Select Data Store"
helperText="Select the data store you wish to view"
label="Please select a data store"
items={dataStores}
selectedItem={dataStore}
itemToString={(ds: DataStore) => (ds ? `${ds.name} (${ds.type})` : '')}
onChange={(event: any) => {
setDataStore(event.selectedItem);
searchParams.set('datastore_page', '1');
searchParams.set('datastore_per_page', '10');
searchParams.set('type', event.selectedItem.type);
searchParams.set('name', event.selectedItem.name);
setSearchParams(searchParams);
}}
/>
<PaginationForTable
page={page}
perPage={perPage}
pagination={pagination}
tableToDisplay={getTable()}
paginationQueryParamPrefix="datastore"
/>
</>
);
}

View File

@@ -460,6 +460,15 @@ export interface DataStoreRecords {
export interface DataStore {
name: string;
type: string;
id: string;
schema: string;
description?: string | null;
}
export interface DataStoreType {
type: string;
name: string;
description: string;
}
export interface JsonSchemaExample {

View File

@@ -1,7 +1,7 @@
import { Route, Routes } from 'react-router-dom';
import Configuration from './Configuration';
import MessageListPage from './MessageListPage';
import DataStorePage from './DataStorePage';
import DataStoreRoutes from './DataStoreRoutes';
import { UiSchemaUxElement } from '../extension_ui_schema_interfaces';
import HomeRoutes from './HomeRoutes';
import ProcessGroupRoutes from './ProcessGroupRoutes';
@@ -39,7 +39,7 @@ export default function BaseRoutes({ extensionUxElements }: OwnProps) {
element={<Configuration extensionUxElements={extensionUxElements} />}
/>
<Route path="messages" element={<MessageListPage />} />
<Route path="data-stores" element={<DataStorePage />} />
<Route path="data-stores/*" element={<DataStoreRoutes />} />
<Route path="about" element={<About />} />
<Route path="admin/*" element={<AdminRedirect />} />
<Route path="/*" element={<Page404 />} />

View File

@@ -1,13 +1,13 @@
import React from 'react';
import DataStoreList from '../components/DataStoreList';
import DataStoreListTable from '../components/DataStoreListTable';
import { setPageTitle } from '../helpers';
export default function DataStorePage() {
export default function DataStoreList() {
setPageTitle(['Data Stores']);
return (
<>
<h1>Data Stores</h1>
<DataStoreList />
<DataStoreListTable />
</>
);
}

View File

@@ -0,0 +1,42 @@
import { useEffect, useState } from 'react';
import { useSearchParams } from 'react-router-dom';
import ProcessBreadcrumb from '../components/ProcessBreadcrumb';
import DataStoreForm from '../components/DataStoreForm';
import { DataStore, HotCrumbItem } from '../interfaces';
import { setPageTitle } from '../helpers';
export default function DataStoreNew() {
const [searchParams] = useSearchParams();
const parentGroupId = searchParams.get('parentGroupId');
const [dataStore, setDataStore] = useState<DataStore>({
id: '',
name: '',
type: '',
schema: '',
description: '',
});
useEffect(() => {
setPageTitle(['New Data Store']);
}, []);
const hotCrumbs: HotCrumbItem[] = [['Process Groups', '/process-groups']];
if (parentGroupId) {
hotCrumbs.push({
entityToExplode: parentGroupId,
entityType: 'process-group-id',
linkLastItem: true,
});
}
return (
<>
<ProcessBreadcrumb hotCrumbs={hotCrumbs} />
<h1>Add Data Store</h1>
<DataStoreForm
mode="new"
dataStore={dataStore}
setDataStore={setDataStore}
/>
</>
);
}

View File

@@ -0,0 +1,12 @@
import { Route, Routes } from 'react-router-dom';
import DataStoreList from './DataStoreList';
import DataStoreNew from './DataStoreNew';
export default function DataStoreRoutes() {
return (
<Routes>
<Route path="/" element={<DataStoreList />} />
<Route path="new" element={<DataStoreNew />} />
</Routes>
);
}

View File

@@ -26,6 +26,7 @@ export default function ProcessGroupShow() {
const { targetUris } = useUriListForPermissions();
const permissionRequestData: PermissionsToCheck = {
[targetUris.dataStoreListPath]: ['POST'],
[targetUris.processGroupListPath]: ['POST'],
[targetUris.processGroupShowPath]: ['PUT', 'DELETE'],
[targetUris.processModelCreatePath]: ['POST'],
@@ -131,6 +132,13 @@ export default function ProcessGroupShow() {
Add a process model
</Button>
</Can>
<Can I="POST" a={targetUris.dataStoreListPath} ability={ability}>
<Button
href={`/data-stores/new?parentGroupId=${processGroup.id}`}
>
Add a data store
</Button>
</Can>
</Stack>
<br />
<br />