Table-based generic JSON data store (#486)

This commit is contained in:
jbirddog 2023-09-13 11:57:26 -04:00 committed by GitHub
parent 9b1377fb6a
commit 45f85a7f77
6 changed files with 141 additions and 3 deletions

View File

@ -0,0 +1,42 @@
"""empty message
Revision ID: 55bbdeb6b635
Revises: 844cee572018
Create Date: 2023-09-11 10:30:38.559968
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '55bbdeb6b635'
down_revision = '844cee572018'
branch_labels = None
depends_on = None
def upgrade():
    """Create the ``json_data_store`` table and its index on ``name``.

    Each row holds one JSON document for a BPMN data store, scoped by the
    data store ``name`` and the process model ``location``.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('json_data_store',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=255), nullable=True),
    sa.Column('location', sa.String(length=255), nullable=True),
    sa.Column('data', sa.JSON(), nullable=True),
    sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
    sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    # Batch mode so the index DDL also works on SQLite.
    with op.batch_alter_table('json_data_store', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_json_data_store_name'), ['name'], unique=False)
    # ### end Alembic commands ###
def downgrade():
    """Drop the ``json_data_store`` table, reversing :func:`upgrade`."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Drop the index first (batch mode for SQLite), then the table itself.
    with op.batch_alter_table('json_data_store', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_json_data_store_name'))

    op.drop_table('json_data_store')
    # ### end Alembic commands ###

View File

@ -0,0 +1,75 @@
from typing import Any
from flask import current_app
from SpiffWorkflow.bpmn.serializer.helpers.spec import BpmnSpecConverter # type: ignore
from SpiffWorkflow.bpmn.specs.data_spec import BpmnDataStoreSpecification # type: ignore
from SpiffWorkflow.task import Task as SpiffTask # type: ignore
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.json_data_store import JSONDataStoreModel
def _process_model_location_for_task(spiff_task: SpiffTask) -> str | None:
    """Return the current process model identifier from thread-local data.

    Returns None when no thread-local data is configured or when it does
    not carry a ``process_model_identifier`` attribute.
    """
    thread_local_data = current_app.config.get("THREAD_LOCAL_DATA")
    if not thread_local_data:
        return None
    return getattr(thread_local_data, "process_model_identifier", None)  # type: ignore
class JSONDataStore(BpmnDataStoreSpecification):  # type: ignore
    """BPMN data store backed by the ``json_data_store`` table.

    Documents are keyed by the data store's BPMN id together with the
    process model location taken from thread-local data.
    """

    def get(self, my_task: SpiffTask) -> None:
        """Load the stored JSON document into ``my_task.data[self.bpmn_id]``.

        Raises:
            Exception: if no location is available or no matching row exists.
        """
        model: JSONDataStoreModel | None = None
        location = _process_model_location_for_task(my_task)
        if location is not None:
            model = db.session.query(JSONDataStoreModel).filter_by(name=self.bpmn_id, location=location).first()
        if model is None:
            raise Exception(f"Unable to read from data store '{self.bpmn_id}' using location '{location}'.")
        my_task.data[self.bpmn_id] = model.data

    def set(self, my_task: SpiffTask) -> None:
        """Persist ``my_task.data[self.bpmn_id]`` and remove it from task data.

        Any existing row for this (name, location) pair is deleted before the
        new row is inserted, so at most one document exists per pair.

        Raises:
            Exception: if no process model location is available.
        """
        location = _process_model_location_for_task(my_task)
        if location is None:
            raise Exception(f"Unable to write to data store '{self.bpmn_id}' using location '{location}'.")
        data = my_task.data[self.bpmn_id]
        model = JSONDataStoreModel(
            name=self.bpmn_id,
            location=location,
            data=data,
        )
        # Replace-then-insert; the delete and the add share one commit.
        db.session.query(JSONDataStoreModel).filter_by(name=self.bpmn_id, location=location).delete()
        db.session.add(model)
        db.session.commit()
        # The document now lives in the data store, not in task data.
        del my_task.data[self.bpmn_id]

    @staticmethod
    def register_converter(spec_config: dict[str, Any]) -> None:
        """Append this store's converter to the serializer spec config."""
        spec_config["task_specs"].append(JSONDataStoreConverter)

    @staticmethod
    def register_data_store_class(data_store_classes: dict[str, Any]) -> None:
        """Register this class under its BPMN data store type name."""
        data_store_classes["JSONDataStore"] = JSONDataStore
class JSONDataStoreConverter(BpmnSpecConverter):  # type: ignore
    """Serializes JSONDataStore specs to and from plain dicts."""

    # Attributes copied verbatim between the spec object and its dict form.
    _SPEC_FIELDS = ("bpmn_id", "bpmn_name", "capacity", "is_unlimited")

    def __init__(self, registry):  # type: ignore
        """Register this converter for the JSONDataStore spec class."""
        super().__init__(JSONDataStore, registry)

    def to_dict(self, spec: Any) -> dict[str, Any]:
        """Capture the spec's serializable attributes as a dict."""
        return {field: getattr(spec, field) for field in self._SPEC_FIELDS}

    def from_dict(self, dct: dict[str, Any]) -> JSONDataStore:
        """Rebuild a JSONDataStore spec from its dict representation."""
        return JSONDataStore(**dct)

View File

@ -82,6 +82,9 @@ from spiffworkflow_backend.models.process_model_cycle import (
from spiffworkflow_backend.models.typeahead import (
TypeaheadModel,
) # noqa: F401
from spiffworkflow_backend.models.json_data_store import (
JSONDataStoreModel,
) # noqa: F401
from spiffworkflow_backend.models.task_draft_data import (
TaskDraftDataModel,
) # noqa: F401

View File

@ -0,0 +1,16 @@
from dataclasses import dataclass
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.db import db
@dataclass
class JSONDataStoreModel(SpiffworkflowBaseDBModel):
    """One JSON document stored on behalf of a BPMN data store.

    Rows are looked up by ``name`` (the data store's BPMN id) together
    with ``location`` (the process model identifier).
    """

    __tablename__ = "json_data_store"

    # Surrogate primary key.
    id: int = db.Column(db.Integer, primary_key=True)
    # BPMN id of the data store; indexed because reads filter on it.
    name: str = db.Column(db.String(255), index=True)
    # Fully qualified process model identifier that scopes the document.
    location: str = db.Column(db.String(255))
    # The stored JSON payload.
    data: dict = db.Column(db.JSON)
    updated_at_in_seconds: int = db.Column(db.Integer)
    created_at_in_seconds: int = db.Column(db.Integer)

View File

@ -2,6 +2,7 @@ from typing import Any
from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser # type: ignore
from SpiffWorkflow.spiff.parser.process import SpiffBpmnParser # type: ignore
from spiffworkflow_backend.data_stores.json import JSONDataStore
from spiffworkflow_backend.data_stores.typeahead import TypeaheadDataStore
from spiffworkflow_backend.specs.start_event import StartEvent
@ -16,4 +17,5 @@ class MyCustomParser(BpmnDmnParser): # type: ignore
DATA_STORE_CLASSES: dict[str, Any] = {}
JSONDataStore.register_data_store_class(DATA_STORE_CLASSES)
TypeaheadDataStore.register_data_store_class(DATA_STORE_CLASSES)

View File

@ -43,6 +43,7 @@ from SpiffWorkflow.spiff.serializer.config import SPIFF_SPEC_CONFIG # type: ign
from SpiffWorkflow.task import Task as SpiffTask # type: ignore
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.util.deep_merge import DeepMerge # type: ignore
from spiffworkflow_backend.data_stores.json import JSONDataStore
from spiffworkflow_backend.data_stores.typeahead import TypeaheadDataStore
from spiffworkflow_backend.exceptions.api_error import ApiError
from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel
@ -91,6 +92,7 @@ from spiffworkflow_backend.specs.start_event import StartEvent
from sqlalchemy import and_
StartEvent.register_converter(SPIFF_SPEC_CONFIG)
JSONDataStore.register_converter(SPIFF_SPEC_CONFIG)
TypeaheadDataStore.register_converter(SPIFF_SPEC_CONFIG)
# Sorry about all this crap. I wanted to move this thing to another file, but
@ -416,9 +418,7 @@ class ProcessInstanceProcessor:
tld.process_instance_id = process_instance_model.id
# we want this to be the fully qualified path to the process model including all group subcomponents
current_app.config["THREAD_LOCAL_DATA"].process_model_identifier = (
f"{process_instance_model.process_model_identifier}"
)
tld.process_model_identifier = f"{process_instance_model.process_model_identifier}"
self.process_instance_model = process_instance_model
bpmn_process_spec = None