Feature/api keys (#489)

* some initial work to support user api keys w/ burnettk

* some updates to store and use service accounts - migrations do not work in sqlite atm

* pyl

* minor tweak to the migration

* refactored user route

* this is working if returning user that created the service account

* put back migrations from main w/ burnettk

* tests pass with new migration w/ burnettk

* do not remove service account permissions on refresh_permissions w/ burnettk

* added new component to make some api calls to populate child components and routes w/ burnettk

* allow displaying extensions in configuration tab w/ burnettk

* removed service accounts controller in favor of extension and encrypt the api keys

* add fuzz to username to make deleting and recreating service accounts easier

* allow specifying the process id to use when running an extension w/ burnettk

* allow extensions to navigate to each other on form submit w/ burnettk

* removed commented out debug code

---------

Co-authored-by: jasquat <jasquat@users.noreply.github.com>
jasquat 2023-09-15 10:10:57 -04:00 committed by GitHub
parent daddf639ad
commit f6d3bc8e73
25 changed files with 873 additions and 378 deletions
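Before the per-file changes: the feature this commit adds is authentication via an X-API-KEY header backed by hashed service-account keys (see the verify_token and ServiceAccountModel changes below). A rough client-side sketch of how such a key would be used, assuming a backend reachable at http://localhost:7000 (host, port, and the key value here are placeholders, not part of this commit):

import requests  # third-party HTTP client, assumed to be installed

api_key = "the-plaintext-key-returned-when-the-service-account-was-created"  # placeholder

# mirrors the new test below, which POSTs to /v1.0/secrets with an X-API-KEY header
response = requests.post(
    "http://localhost:7000/v1.0/secrets",  # assumed local backend URL
    json={"key": "secret_key", "value": "hey_value"},
    headers={"X-API-KEY": api_key},
)
print(response.status_code, response.json())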

View File

@@ -0,0 +1,52 @@
"""empty message
Revision ID: 9d5b6c5c31a5
Revises: 55bbdeb6b635
Create Date: 2023-09-14 08:49:53.619192
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '9d5b6c5c31a5'
down_revision = '55bbdeb6b635'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('service_account',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('created_by_user_id', sa.Integer(), nullable=False),
sa.Column('api_key_hash', sa.String(length=255), nullable=False),
sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['created_by_user_id'], ['user.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name', 'created_by_user_id', name='service_account_uniq')
)
with op.batch_alter_table('service_account', schema=None) as batch_op:
batch_op.create_index(batch_op.f('ix_service_account_api_key_hash'), ['api_key_hash'], unique=True)
batch_op.create_index(batch_op.f('ix_service_account_created_by_user_id'), ['created_by_user_id'], unique=False)
batch_op.create_index(batch_op.f('ix_service_account_name'), ['name'], unique=False)
batch_op.create_index(batch_op.f('ix_service_account_user_id'), ['user_id'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('service_account', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('ix_service_account_user_id'))
batch_op.drop_index(batch_op.f('ix_service_account_name'))
batch_op.drop_index(batch_op.f('ix_service_account_created_by_user_id'))
batch_op.drop_index(batch_op.f('ix_service_account_api_key_hash'))
op.drop_table('service_account')
# ### end Alembic commands ###

View File

@@ -25,7 +25,7 @@ from spiffworkflow_backend.helpers.api_version import V1_API_PATH_PREFIX
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import migrate
from spiffworkflow_backend.routes.openid_blueprint.openid_blueprint import openid_blueprint
-from spiffworkflow_backend.routes.user import set_new_access_token_in_cookie
+from spiffworkflow_backend.routes.user import _set_new_access_token_in_cookie
from spiffworkflow_backend.routes.user import verify_token
from spiffworkflow_backend.routes.user_blueprint import user_blueprint
from spiffworkflow_backend.services.authorization_service import AuthorizationService
@@ -182,7 +182,7 @@ def create_app() -> flask.app.Flask:
app.before_request(verify_token)
app.before_request(AuthorizationService.check_for_permission)
-app.after_request(set_new_access_token_in_cookie)
+app.after_request(_set_new_access_token_in_cookie)
# The default is true, but we want to preserve the order of keys in the json
# This is particularly helpful for forms that are generated from json schemas.

View File

@@ -3457,3 +3457,14 @@ components:
fiilterable:
type: string
nullable: false
+ServiceAccountRequest:
+properties:
+name:
+type: string
+nullable: false
+ServiceAccountApiKey:
+properties:
+api_key:
+type: string
+nullable: false

View File

@@ -94,5 +94,8 @@ from spiffworkflow_backend.models.configuration import (
from spiffworkflow_backend.models.user_property import (
UserPropertyModel,
) # noqa: F401
+from spiffworkflow_backend.models.service_account import (
+ServiceAccountModel,
+) # noqa: F401
add_listeners()

View File

@@ -14,6 +14,11 @@ db = SQLAlchemy()
migrate = Migrate()
+# NOTE: ensure all db models are added to src/spiffworkflow_backend/load_database_models.py so that:
+# 1) they will be loaded in time for add_listeners. otherwise they may not auto-update created_at and updated_at times
+# 2) database migration code picks them up when migrations are automatically generated
class SpiffworkflowBaseDBModel(db.Model): # type: ignore
__abstract__ = True

View File

@@ -0,0 +1,55 @@
from __future__ import annotations
import time
import uuid
from dataclasses import dataclass
from hashlib import sha256
from sqlalchemy import ForeignKey
from sqlalchemy.orm import relationship
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.db import db
# this is designed to be used for the "service" column on the user table, which is designed to hold
# information about which authentication system is used to authenticate this user.
# in this case, we are authenticating based on X-API-KEY which correlates to a known value in the spiff db.
SPIFF_SERVICE_ACCOUNT_AUTH_SERVICE = "spiff_service_account"
SPIFF_SERVICE_ACCOUNT_AUTH_SERVICE_ID_PREFIX = "service_account_"
@dataclass
class ServiceAccountModel(SpiffworkflowBaseDBModel):
__tablename__ = "service_account"
__allow_unmapped__ = True
__table_args__ = (db.UniqueConstraint("name", "created_by_user_id", name="service_account_uniq"),)
id: int = db.Column(db.Integer, primary_key=True)
name: str = db.Column(db.String(255), nullable=False, unique=False, index=True)
user_id: int = db.Column(ForeignKey("user.id"), nullable=False, index=True)
created_by_user_id: int = db.Column(ForeignKey("user.id"), nullable=False, index=True)
api_key_hash: str = db.Column(db.String(255), nullable=False, unique=True, index=True)
user = relationship("UserModel", uselist=False, cascade="delete", foreign_keys=[user_id]) # type: ignore
updated_at_in_seconds: int = db.Column(db.Integer)
created_at_in_seconds: int = db.Column(db.Integer)
# only to be used when the service account is first created, to tell the user what the key is
api_key: str | None = None
@classmethod
def generate_api_key(cls) -> str:
return str(uuid.uuid4())
@classmethod
def hash_api_key(cls, unencrypted_api_key: str) -> str:
return sha256(unencrypted_api_key.encode("utf8")).hexdigest()
@classmethod
def generate_username_for_related_user(cls, service_account_name: str, created_by_user_id: int) -> str:
# add fuzz to username so a user can delete and recreate an api_key with the same name
# also make the username readable so we know where it came from even after the service account is deleted
creation_time_for_fuzz = time.time()
return f"{service_account_name}_{created_by_user_id}_{creation_time_for_fuzz}"

View File

@@ -4,9 +4,7 @@ from dataclasses import dataclass
from typing import Any
import jwt
-import marshmallow
from flask import current_app
-from marshmallow import Schema
from sqlalchemy.orm import relationship
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
@@ -81,14 +79,3 @@ class UserModel(SpiffworkflowBaseDBModel):
user_as_json_string = current_app.json.dumps(self)
user_dict: dict[str, Any] = current_app.json.loads(user_as_json_string)
return user_dict
-class UserModelSchema(Schema):
-class Meta:
-model = UserModel
-# load_instance = True
-# include_relationships = False
-# exclude = ("UserGroupAssignment",)
-id = marshmallow.fields.String(required=True)
-username = marshmallow.fields.String(required=True)

View File

@@ -101,10 +101,11 @@ def _run_extension(
process_model = _get_process_model(process_model_identifier)
except ApiError as ex:
if ex.error_code == "process_model_cannot_be_found":
+# if process_model_identifier.startswith(current_app.config["SPIFFWORKFLOW_BACKEND_EXTENSIONS_PROCESS_MODEL_PREFIX"])
raise ApiError(
error_code="invalid_process_model_extension",
message=(
-f"Process Model '{process_model_identifier}' cannot be run as an extension. It must be in the"
+f"Process Model '{process_model_identifier}' could not be found as an extension. It must be in the"
" correct Process Group:"
f" {current_app.config['SPIFFWORKFLOW_BACKEND_EXTENSIONS_PROCESS_MODEL_PREFIX']}"
),
@@ -124,9 +125,11 @@ def _run_extension(
ui_schema_action = None
persistence_level = "none"
+process_id_to_run = None
if body and "ui_schema_action" in body:
ui_schema_action = body["ui_schema_action"]
persistence_level = ui_schema_action.get("persistence_level", "none")
+process_id_to_run = ui_schema_action.get("process_id_to_run", None)
process_instance = None
if persistence_level == "none":
@ -145,7 +148,9 @@ def _run_extension(
processor = None
try:
processor = ProcessInstanceProcessor(
-process_instance, script_engine=CustomBpmnScriptEngine(use_restricted_script_engine=False)
+process_instance,
+script_engine=CustomBpmnScriptEngine(use_restricted_script_engine=False),
+process_id_to_run=process_id_to_run,
)
if body and "extension_input" in body:
processor.do_engine_steps(save=False, execution_strategy_name="run_current_ready_tasks")
@@ -166,12 +171,13 @@ def _run_extension(
# we need to recurse through all last tasks if the last task is a call activity or subprocess.
if processor is not None:
task = processor.bpmn_process_instance.last_task
-raise ApiError.from_task(
-error_code="unknown_exception",
-message=f"An unknown error occurred. Original error: {e}",
-status_code=400,
-task=task,
-) from e
+if task is not None:
+raise ApiError.from_task(
+error_code="unknown_exception",
+message=f"An unknown error occurred. Original error: {e}",
+status_code=400,
+task=task,
+) from e
raise e
task_data = {}

View File

@@ -18,6 +18,7 @@ from spiffworkflow_backend.exceptions.api_error import ApiError
from spiffworkflow_backend.helpers.api_version import V1_API_PATH_PREFIX
from spiffworkflow_backend.models.group import SPIFF_GUEST_GROUP
from spiffworkflow_backend.models.group import SPIFF_NO_AUTH_GROUP
+from spiffworkflow_backend.models.service_account import ServiceAccountModel
from spiffworkflow_backend.models.task import TaskModel # noqa: F401
from spiffworkflow_backend.models.user import SPIFF_GUEST_USER
from spiffworkflow_backend.models.user import SPIFF_NO_AUTH_USER
@@ -51,155 +52,35 @@ def verify_token(token: str | None = None, force_run: bool | None = False) -> None:
ApiError: If not on production and token is not valid, returns an 'invalid_token' 403 error.
If on production and user is not authenticated, returns a 'no_user' 403 error.
"""
-user_info = None
if not force_run and AuthorizationService.should_disable_auth_for_request():
return None
-if not token and "Authorization" in request.headers:
-token = request.headers["Authorization"].removeprefix("Bearer ")
-if not token and "access_token" in request.cookies:
-if request.path.startswith(f"{V1_API_PATH_PREFIX}/process-data-file-download/") or request.path.startswith(
-f"{V1_API_PATH_PREFIX}/extensions-get-data/"
-):
-token = request.cookies["access_token"]
+token_info = _find_token_from_headers(token)
# This should never be set here but just in case
_clear_auth_tokens_from_thread_local_data()
-if token:
-user_model = None
-decoded_token = get_decoded_token(token)
+user_model = None
+if token_info["token"] is not None:
+# import pdb; pdb.set_trace()
+user_model = _get_user_model_from_token(token_info["token"])
+elif token_info["api_key"] is not None:
+user_model = _get_user_model_from_api_key(token_info["api_key"])
-if decoded_token is not None:
-if "token_type" in decoded_token:
-token_type = decoded_token["token_type"]
-if token_type == "internal": # noqa: S105
-try:
-user_model = get_user_from_decoded_internal_token(decoded_token)
-except Exception as e:
-current_app.logger.error(
-f"Exception in verify_token getting user from decoded internal token. {e}"
-)
+if user_model:
+g.user = user_model
-# if the user is forced logged out then stop processing the token
-if _force_logout_user_if_necessary(user_model):
-return None
+# If the user is valid, store the token for this session
+if g.user:
+if token_info["token"]:
elif "iss" in decoded_token.keys():
user_info = None
try:
if AuthenticationService.validate_id_or_access_token(token):
user_info = decoded_token
except TokenExpiredError as token_expired_error:
# Try to refresh the token
user = UserService.get_user_by_service_and_service_id(decoded_token["iss"], decoded_token["sub"])
if user:
refresh_token = AuthenticationService.get_refresh_token(user.id)
if refresh_token:
auth_token: dict = AuthenticationService.get_auth_token_from_refresh_token(refresh_token)
if auth_token and "error" not in auth_token and "id_token" in auth_token:
tld = current_app.config["THREAD_LOCAL_DATA"]
tld.new_access_token = auth_token["id_token"]
tld.new_id_token = auth_token["id_token"]
# We have the user, but this code is a bit convoluted, and will later demand
# a user_info object so it can look up the user. Sorry to leave this crap here.
user_info = {
"sub": user.service_id,
"iss": user.service,
}
if user_info is None:
raise ApiError(
error_code="invalid_token",
message="Your token is expired. Please Login",
status_code=401,
) from token_expired_error
except Exception as e:
raise ApiError(
error_code="fail_get_user_info",
message="Cannot get user info from token",
status_code=401,
) from e
if (
user_info is not None and "error" not in user_info and "iss" in user_info
): # not sure what to test yet
user_model = (
UserModel.query.filter(UserModel.service == user_info["iss"])
.filter(UserModel.service_id == user_info["sub"])
.first()
)
if user_model is None:
raise ApiError(
error_code="invalid_user",
message="Invalid user. Please log in.",
status_code=401,
)
# no user_info
else:
raise ApiError(
error_code="no_user_info",
message="Cannot retrieve user info",
status_code=401,
)
else:
current_app.logger.debug("token_type not in decode_token in verify_token")
raise ApiError(
error_code="invalid_token",
message="Invalid token. Please log in.",
status_code=401,
)
if user_model:
g.user = user_model
# If the user is valid, store the token for this session
if g.user:
# This is an id token, so we don't have a refresh token yet
-g.token = token
-get_scope(token)
+g.token = token_info["token"]
+get_scope(token_info["token"])
return None
-else:
-raise ApiError(error_code="no_user_id", message="Cannot get a user id")
raise ApiError(error_code="invalid_token", message="Cannot validate token.", status_code=401)
def set_new_access_token_in_cookie(
response: flask.wrappers.Response,
) -> flask.wrappers.Response:
"""Checks if a new token has been set in THREAD_LOCAL_DATA and sets cookies if appropriate.
It will also delete the cookies if the user has logged out.
"""
tld = current_app.config["THREAD_LOCAL_DATA"]
domain_for_frontend_cookie: str | None = re.sub(
r"^https?:\/\/",
"",
current_app.config["SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND"],
)
if domain_for_frontend_cookie and domain_for_frontend_cookie.startswith("localhost"):
domain_for_frontend_cookie = None
# fixme - we should not be passing the access token back to the client
if hasattr(tld, "new_access_token") and tld.new_access_token:
response.set_cookie("access_token", tld.new_access_token, domain=domain_for_frontend_cookie)
# id_token is required for logging out since this gets passed back to the openid server
if hasattr(tld, "new_id_token") and tld.new_id_token:
response.set_cookie("id_token", tld.new_id_token, domain=domain_for_frontend_cookie)
if hasattr(tld, "user_has_logged_out") and tld.user_has_logged_out:
response.set_cookie("id_token", "", max_age=0, domain=domain_for_frontend_cookie)
response.set_cookie("access_token", "", max_age=0, domain=domain_for_frontend_cookie)
_clear_auth_tokens_from_thread_local_data()
return response
def login(redirect_url: str = "/", process_instance_id: int | None = None, task_guid: str | None = None) -> Response:
if current_app.config.get("SPIFFWORKFLOW_BACKEND_AUTHENTICATION_DISABLED"):
AuthorizationService.create_guest_token(
@@ -223,25 +104,13 @@ def login(redirect_url: str = "/", process_instance_id: int | None = None, task_guid: str | None = None) -> Response:
return redirect(login_redirect_url)
def parse_id_token(token: str) -> Any:
"""Parse the id token."""
parts = token.split(".")
if len(parts) != 3:
raise Exception("Incorrect id token format")
payload = parts[1]
padded = payload + "=" * (4 - len(payload) % 4)
decoded = base64.b64decode(padded)
return json.loads(decoded)
def login_return(code: str, state: str, session_state: str = "") -> Response | None:
state_dict = ast.literal_eval(base64.b64decode(state).decode("utf-8"))
state_redirect_url = state_dict["redirect_url"]
auth_token_object = AuthenticationService().get_auth_token_object(code)
if "id_token" in auth_token_object:
id_token = auth_token_object["id_token"]
-user_info = parse_id_token(id_token)
+user_info = _parse_id_token(id_token)
if AuthenticationService.validate_id_or_access_token(id_token):
if user_info and "error" not in user_info:
@@ -273,7 +142,7 @@ def login_return(code: str, state: str, session_state: str = "") -> Response | None:
# FIXME: share more code with login_return and maybe attempt to get a refresh token
def login_with_access_token(access_token: str) -> Response:
-user_info = parse_id_token(access_token)
+user_info = _parse_id_token(access_token)
if AuthenticationService.validate_id_or_access_token(access_token):
if user_info and "error" not in user_info:
@@ -320,22 +189,6 @@ def logout_return() -> Response:
return redirect(f"{frontend_url}/")
def get_decoded_token(token: str) -> dict | None:
try:
decoded_token = jwt.decode(token, options={"verify_signature": False})
except Exception as e:
raise ApiError(error_code="invalid_token", message="Cannot decode token.") from e
else:
if "token_type" in decoded_token or "iss" in decoded_token:
return decoded_token
else:
current_app.logger.error(f"Unknown token type in get_decoded_token: token: {token}")
raise ApiError(
error_code="unknown_token",
message="Unknown token type in get_decoded_token",
)
def get_scope(token: str) -> str:
scope = ""
decoded_token = jwt.decode(token, options={"verify_signature": False})
@@ -344,18 +197,38 @@ def get_scope(token: str) -> str:
return scope
-def get_user_from_decoded_internal_token(decoded_token: dict) -> UserModel | None:
-sub = decoded_token["sub"]
-parts = sub.split("::")
-service = parts[0].split(":")[1]
-service_id = parts[1].split(":")[1]
-user: UserModel = (
-UserModel.query.filter(UserModel.service == service).filter(UserModel.service_id == service_id).first()
+# this isn't really a private method but it's also not a valid api call so underscoring it
+def _set_new_access_token_in_cookie(
+response: flask.wrappers.Response,
+) -> flask.wrappers.Response:
+"""Checks if a new token has been set in THREAD_LOCAL_DATA and sets cookies if appropriate.
+It will also delete the cookies if the user has logged out.
"""
tld = current_app.config["THREAD_LOCAL_DATA"]
domain_for_frontend_cookie: str | None = re.sub(
r"^https?:\/\/",
"",
current_app.config["SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND"],
) )
-if user:
-return user
-user = UserService.create_user(service_id, service, service_id)
-return user
+if domain_for_frontend_cookie and domain_for_frontend_cookie.startswith("localhost"):
+domain_for_frontend_cookie = None
+# fixme - we should not be passing the access token back to the client
if hasattr(tld, "new_access_token") and tld.new_access_token:
response.set_cookie("access_token", tld.new_access_token, domain=domain_for_frontend_cookie)
# id_token is required for logging out since this gets passed back to the openid server
if hasattr(tld, "new_id_token") and tld.new_id_token:
response.set_cookie("id_token", tld.new_id_token, domain=domain_for_frontend_cookie)
if hasattr(tld, "user_has_logged_out") and tld.user_has_logged_out:
response.set_cookie("id_token", "", max_age=0, domain=domain_for_frontend_cookie)
response.set_cookie("access_token", "", max_age=0, domain=domain_for_frontend_cookie)
_clear_auth_tokens_from_thread_local_data()
return response
def _clear_auth_tokens_from_thread_local_data() -> None:
@@ -388,3 +261,158 @@ def _force_logout_user_if_necessary(user_model: UserModel | None = None) -> bool:
tld.user_has_logged_out = True
return True
return False
def _find_token_from_headers(token: str | None) -> dict[str, str | None]:
api_key = None
if not token and "Authorization" in request.headers:
token = request.headers["Authorization"].removeprefix("Bearer ")
if not token and "access_token" in request.cookies:
if request.path.startswith(f"{V1_API_PATH_PREFIX}/process-data-file-download/") or request.path.startswith(
f"{V1_API_PATH_PREFIX}/extensions-get-data/"
):
token = request.cookies["access_token"]
if not token and "X-API-KEY" in request.headers:
api_key = request.headers["X-API-KEY"]
token_info = {"token": token, "api_key": api_key}
return token_info
def _get_user_model_from_api_key(api_key: str) -> UserModel | None:
api_key_hash = ServiceAccountModel.hash_api_key(api_key)
service_account = ServiceAccountModel.query.filter_by(api_key_hash=api_key_hash).first()
user_model = None
if service_account is not None:
user_model = UserModel.query.filter_by(id=service_account.user_id).first()
return user_model
def _get_user_model_from_token(token: str) -> UserModel | None:
user_model = None
decoded_token = _get_decoded_token(token)
if decoded_token is not None:
if "token_type" in decoded_token:
token_type = decoded_token["token_type"]
if token_type == "internal": # noqa: S105
try:
user_model = _get_user_from_decoded_internal_token(decoded_token)
except Exception as e:
current_app.logger.error(
f"Exception in verify_token getting user from decoded internal token. {e}"
)
# if the user is forced logged out then stop processing the token
if _force_logout_user_if_necessary(user_model):
return None
elif "iss" in decoded_token.keys():
user_info = None
try:
if AuthenticationService.validate_id_or_access_token(token):
user_info = decoded_token
except TokenExpiredError as token_expired_error:
# Try to refresh the token
user = UserService.get_user_by_service_and_service_id(decoded_token["iss"], decoded_token["sub"])
if user:
refresh_token = AuthenticationService.get_refresh_token(user.id)
if refresh_token:
auth_token: dict = AuthenticationService.get_auth_token_from_refresh_token(refresh_token)
if auth_token and "error" not in auth_token and "id_token" in auth_token:
tld = current_app.config["THREAD_LOCAL_DATA"]
tld.new_access_token = auth_token["id_token"]
tld.new_id_token = auth_token["id_token"]
# We have the user, but this code is a bit convoluted, and will later demand
# a user_info object so it can look up the user. Sorry to leave this crap here.
user_info = {
"sub": user.service_id,
"iss": user.service,
}
if user_info is None:
raise ApiError(
error_code="invalid_token",
message="Your token is expired. Please Login",
status_code=401,
) from token_expired_error
except Exception as e:
raise ApiError(
error_code="fail_get_user_info",
message="Cannot get user info from token",
status_code=401,
) from e
if user_info is not None and "error" not in user_info and "iss" in user_info: # not sure what to test yet
user_model = (
UserModel.query.filter(UserModel.service == user_info["iss"])
.filter(UserModel.service_id == user_info["sub"])
.first()
)
if user_model is None:
raise ApiError(
error_code="invalid_user",
message="Invalid user. Please log in.",
status_code=401,
)
# no user_info
else:
raise ApiError(
error_code="no_user_info",
message="Cannot retrieve user info",
status_code=401,
)
else:
current_app.logger.debug("token_type not in decode_token in verify_token")
raise ApiError(
error_code="invalid_token",
message="Invalid token. Please log in.",
status_code=401,
)
return user_model
def _get_user_from_decoded_internal_token(decoded_token: dict) -> UserModel | None:
sub = decoded_token["sub"]
parts = sub.split("::")
service = parts[0].split(":")[1]
service_id = parts[1].split(":")[1]
user: UserModel = (
UserModel.query.filter(UserModel.service == service).filter(UserModel.service_id == service_id).first()
)
if user:
return user
user = UserService.create_user(service_id, service, service_id)
return user
def _get_decoded_token(token: str) -> dict | None:
try:
decoded_token = jwt.decode(token, options={"verify_signature": False})
except Exception as e:
raise ApiError(error_code="invalid_token", message="Cannot decode token.") from e
else:
if "token_type" in decoded_token or "iss" in decoded_token:
return decoded_token
else:
current_app.logger.error(f"Unknown token type in get_decoded_token: token: {token}")
raise ApiError(
error_code="unknown_token",
message="Unknown token type in get_decoded_token",
)
def _parse_id_token(token: str) -> Any:
"""Parse the id token."""
parts = token.split(".")
if len(parts) != 3:
raise Exception("Incorrect id token format")
payload = parts[1]
padded = payload + "=" * (4 - len(payload) % 4)
decoded = base64.b64decode(padded)
return json.loads(decoded)
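To summarize the lookup order that _find_token_from_headers above implements: an explicitly passed token, then the Authorization: Bearer header, then the access_token cookie (only for the process-data-file-download and extensions-get-data paths), then X-API-KEY. A standalone sketch of that precedence with plain dicts standing in for the Flask request (the "/v1.0" prefix is an assumption about V1_API_PATH_PREFIX):

def find_token(token: str | None, headers: dict, cookies: dict, path: str) -> dict:
    # mirrors _find_token_from_headers, without the flask.request dependency
    api_key = None
    if not token and "Authorization" in headers:
        token = headers["Authorization"].removeprefix("Bearer ")
    if not token and "access_token" in cookies:
        if path.startswith("/v1.0/process-data-file-download/") or path.startswith("/v1.0/extensions-get-data/"):
            token = cookies["access_token"]
    if not token and "X-API-KEY" in headers:
        api_key = headers["X-API-KEY"]
    return {"token": token, "api_key": api_key}

print(find_token(None, {"X-API-KEY": "abc123"}, {}, "/v1.0/secrets"))  # {'token': None, 'api_key': 'abc123'}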

View File

@@ -21,6 +21,7 @@ from spiffworkflow_backend.models.permission_assignment import PermissionAssignmentModel
from spiffworkflow_backend.models.permission_target import PermissionTargetModel
from spiffworkflow_backend.models.principal import MissingPrincipalError
from spiffworkflow_backend.models.principal import PrincipalModel
+from spiffworkflow_backend.models.service_account import SPIFF_SERVICE_ACCOUNT_AUTH_SERVICE
from spiffworkflow_backend.models.task import TaskModel # noqa: F401
from spiffworkflow_backend.models.user import SPIFF_GUEST_USER
from spiffworkflow_backend.models.user import UserModel
@@ -33,6 +34,7 @@ from spiffworkflow_backend.services.authentication_service import TokenNotProvid
from spiffworkflow_backend.services.authentication_service import UserNotLoggedInError
from spiffworkflow_backend.services.group_service import GroupService
from spiffworkflow_backend.services.user_service import UserService
+from sqlalchemy import and_
from sqlalchemy import or_
from sqlalchemy import text
@ -604,6 +606,8 @@ class AuthorizationService:
PermissionToAssign(permission="update", target_uri="/authentication/configuration") PermissionToAssign(permission="update", target_uri="/authentication/configuration")
) )
permissions_to_assign.append(PermissionToAssign(permission="create", target_uri="/service-accounts"))
return permissions_to_assign return permissions_to_assign
@classmethod @classmethod
@ -891,7 +895,15 @@ class AuthorizationService:
cls, group_permissions: list[GroupPermissionsDict], group_permissions_only: bool = False
) -> None:
"""Adds new permission assignments and deletes old ones."""
-initial_permission_assignments = PermissionAssignmentModel.query.all()
+initial_permission_assignments = (
+PermissionAssignmentModel.query.outerjoin(
+PrincipalModel,
+and_(PrincipalModel.id == PermissionAssignmentModel.principal_id, PrincipalModel.user_id.is_not(None)),
+)
+.outerjoin(UserModel, UserModel.id == PrincipalModel.user_id)
+.filter(or_(UserModel.id.is_(None), UserModel.service != SPIFF_SERVICE_ACCOUNT_AUTH_SERVICE)) # type: ignore
+.all()
+)
initial_user_to_group_assignments = UserGroupAssignmentModel.query.all()
group_permissions = group_permissions + cls.parse_permissions_yaml_into_group_info()
added_permissions = cls.add_permissions_from_group_permissions(

View File

@ -403,16 +403,22 @@ class ProcessInstanceProcessor:
validate_only: bool = False,
script_engine: PythonScriptEngine | None = None,
workflow_completed_handler: WorkflowCompletedHandler | None = None,
+process_id_to_run: str | None = None,
) -> None:
"""Create a Workflow Processor based on the serialized information available in the process_instance model."""
self._script_engine = script_engine or self.__class__._default_script_engine
self._workflow_completed_handler = workflow_completed_handler
self.setup_processor_with_process_instance(
-process_instance_model=process_instance_model, validate_only=validate_only
+process_instance_model=process_instance_model,
+validate_only=validate_only,
+process_id_to_run=process_id_to_run,
)
def setup_processor_with_process_instance(
-self, process_instance_model: ProcessInstanceModel, validate_only: bool = False
+self,
+process_instance_model: ProcessInstanceModel,
+validate_only: bool = False,
+process_id_to_run: str | None = None,
) -> None:
tld = current_app.config["THREAD_LOCAL_DATA"]
tld.process_instance_id = process_instance_model.id
@ -441,7 +447,7 @@ class ProcessInstanceProcessor:
bpmn_process_spec,
subprocesses,
) = ProcessInstanceProcessor.get_process_model_and_subprocesses(
-process_instance_model.process_model_identifier
+process_instance_model.process_model_identifier, process_id_to_run=process_id_to_run
)
self.process_model_identifier = process_instance_model.process_model_identifier
@ -471,7 +477,9 @@ class ProcessInstanceProcessor:
@classmethod
def get_process_model_and_subprocesses(
-cls, process_model_identifier: str
+cls,
+process_model_identifier: str,
+process_id_to_run: str | None = None,
) -> tuple[BpmnProcessSpec, IdToBpmnProcessSpecMapping]:
process_model_info = ProcessModelService.get_process_model(process_model_identifier)
if process_model_info is None:
@ -482,7 +490,7 @@ class ProcessInstanceProcessor:
)
)
spec_files = FileSystemService.get_files(process_model_info)
-return cls.get_spec(spec_files, process_model_info)
+return cls.get_spec(spec_files, process_model_info, process_id_to_run=process_id_to_run)
@classmethod
def get_bpmn_process_instance_from_process_model(cls, process_model_identifier: str) -> BpmnWorkflow:
@ -1303,11 +1311,15 @@ class ProcessInstanceProcessor:
@staticmethod
def get_spec(
-files: list[File], process_model_info: ProcessModelInfo
+files: list[File],
+process_model_info: ProcessModelInfo,
+process_id_to_run: str | None = None,
) -> tuple[BpmnProcessSpec, IdToBpmnProcessSpecMapping]:
"""Returns a SpiffWorkflow specification for the given process_instance spec, using the files provided."""
parser = ProcessInstanceProcessor.get_parser()
+process_id = process_id_to_run or process_model_info.primary_process_id
for file in files:
data = SpecFileService.get_data(process_model_info, file.name)
try:
@ -1322,7 +1334,7 @@ class ProcessInstanceProcessor:
error_code="invalid_xml", error_code="invalid_xml",
message=f"'{file.name}' is not a valid xml file." + str(xse), message=f"'{file.name}' is not a valid xml file." + str(xse),
) from xse ) from xse
if process_model_info.primary_process_id is None or process_model_info.primary_process_id == "": if process_id is None or process_id == "":
raise ( raise (
ApiError( ApiError(
error_code="no_primary_bpmn_error", error_code="no_primary_bpmn_error",
@ -1332,10 +1344,10 @@ class ProcessInstanceProcessor:
ProcessInstanceProcessor.update_spiff_parser_with_all_process_dependency_files(parser)
try:
-bpmn_process_spec = parser.get_spec(process_model_info.primary_process_id)
+bpmn_process_spec = parser.get_spec(process_id)
# returns a dict of {process_id: bpmn_process_spec}, otherwise known as an IdToBpmnProcessSpecMapping
-subprocesses = parser.get_subprocess_specs(process_model_info.primary_process_id)
+subprocesses = parser.get_subprocess_specs(process_id)
except ValidationException as ve:
raise ApiError(
error_code="process_instance_validation_error",

View File

@@ -0,0 +1,54 @@
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.permission_assignment import PermissionAssignmentModel
from spiffworkflow_backend.models.service_account import SPIFF_SERVICE_ACCOUNT_AUTH_SERVICE
from spiffworkflow_backend.models.service_account import SPIFF_SERVICE_ACCOUNT_AUTH_SERVICE_ID_PREFIX
from spiffworkflow_backend.models.service_account import ServiceAccountModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.user_service import UserService
class ServiceAccountService:
@classmethod
def create_service_account(cls, name: str, service_account_creator: UserModel) -> ServiceAccountModel:
api_key = ServiceAccountModel.generate_api_key()
api_key_hash = ServiceAccountModel.hash_api_key(api_key)
username = ServiceAccountModel.generate_username_for_related_user(name, service_account_creator.id)
service_account_user = UserModel(
username=username,
email=f"{username}@spiff.service.account.example.com",
service=SPIFF_SERVICE_ACCOUNT_AUTH_SERVICE,
service_id=f"{SPIFF_SERVICE_ACCOUNT_AUTH_SERVICE_ID_PREFIX}_{username}",
)
db.session.add(service_account_user)
service_account = ServiceAccountModel(
name=name,
created_by_user_id=service_account_creator.id,
api_key_hash=api_key_hash,
user=service_account_user,
)
db.session.add(service_account)
ServiceAccountModel.commit_with_rollback_on_exception()
cls.associated_service_account_with_permissions(service_account_user, service_account_creator)
service_account.api_key = api_key
return service_account
@classmethod
def associated_service_account_with_permissions(
cls, service_account_user: UserModel, service_account_creator: UserModel
) -> None:
principal = UserService.create_principal(service_account_user.id)
user_permissions = sorted(UserService.get_permission_targets_for_user(service_account_creator))
permission_objects = []
for user_permission in user_permissions:
permission_objects.append(
PermissionAssignmentModel(
principal_id=principal.id,
permission_target_id=user_permission[0],
permission=user_permission[1],
grant_type=user_permission[2],
)
)
db.session.bulk_save_objects(permission_objects)
ServiceAccountModel.commit_with_rollback_on_exception()
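Usage-wise, create_service_account above builds the related service-account user, stores only the key hash, copies the creator's effective permissions, and surfaces the plaintext key once on the returned object. A rough sketch of calling it from inside the backend's app context, in the spirit of the new tests below (creating_user stands for whatever UserModel you already have in hand, e.g. an admin user from a fixture):

from spiffworkflow_backend.services.service_account_service import ServiceAccountService

# `creating_user` is an existing UserModel; this call requires an active Flask app/db context
service_account = ServiceAccountService.create_service_account("my_integration", creating_user)

# the plaintext key is only available here, on the freshly created object;
# the database keeps just its sha256 hash in api_key_hash
print(service_account.api_key)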

View File

@ -170,3 +170,27 @@ class UserService:
human_task_user = HumanTaskUserModel(user_id=user.id, human_task_id=human_task.id)
db.session.add(human_task_user)
db.session.commit()
@classmethod
def get_permission_targets_for_user(cls, user: UserModel, check_groups: bool = True) -> set[tuple[str, str, str]]:
unique_permission_assignments = set()
for permission_assignment in user.principal.permission_assignments:
unique_permission_assignments.add(
(
permission_assignment.permission_target_id,
permission_assignment.permission,
permission_assignment.grant_type,
)
)
if check_groups:
for group in user.groups:
for permission_assignment in group.principal.permission_assignments:
unique_permission_assignments.add(
(
permission_assignment.permission_target_id,
permission_assignment.permission,
permission_assignment.grant_type,
)
)
return unique_permission_assignments

View File

@@ -9,6 +9,8 @@ from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.authentication_service import AuthenticationService
from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.authorization_service import GroupPermissionsDict
+from spiffworkflow_backend.services.service_account_service import ServiceAccountService
+from spiffworkflow_backend.services.user_service import UserService
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
@ -87,3 +89,30 @@ class TestAuthentication(BaseTest):
assert sorted(group_identifiers) == ["everybody", "group_one"]
self.assert_user_has_permission(user, "read", "/v1.0/process-groups/hey")
self.assert_user_has_permission(user, "read", "/v1.0/process-groups/hey:yo")
def test_does_not_remove_permissions_from_service_accounts_on_refresh(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
service_account = ServiceAccountService.create_service_account("sa_api_key", with_super_admin_user)
service_account_permissions_before = sorted(
UserService.get_permission_targets_for_user(service_account.user, check_groups=False)
)
# make sure running refresh_permissions doesn't remove the user from the group
group_info: list[GroupPermissionsDict] = [
{
"users": [],
"name": "group_one",
"permissions": [{"actions": ["create", "read"], "uri": "PG:hey"}],
}
]
AuthorizationService.refresh_permissions(group_info, group_permissions_only=True)
service_account_permissions_after = sorted(
UserService.get_permission_targets_for_user(service_account.user, check_groups=False)
)
assert service_account_permissions_before == service_account_permissions_after

View File

@@ -0,0 +1,48 @@
import json
from flask.app import Flask
from flask.testing import FlaskClient
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.service_account_service import ServiceAccountService
from spiffworkflow_backend.services.user_service import UserService
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
class TestServiceAccounts(BaseTest):
def test_can_create_a_service_account(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
api_key_name = "heyhey"
service_account = ServiceAccountService.create_service_account(api_key_name, with_super_admin_user)
assert service_account is not None
assert service_account.created_by_user_id == with_super_admin_user.id
assert service_account.name == api_key_name
assert service_account.api_key is not None
# ci and local set different permissions for the admin user so figure out dynamically
admin_permissions = sorted(UserService.get_permission_targets_for_user(with_super_admin_user))
service_account_permissions = sorted(
UserService.get_permission_targets_for_user(service_account.user, check_groups=False)
)
assert admin_permissions == service_account_permissions
# ensure service account can actually access the api
post_body = {
"key": "secret_key",
"value": "hey_value",
}
response = client.post(
"/v1.0/secrets",
content_type="application/json",
headers={"X-API-KEY": service_account.api_key},
data=json.dumps(post_body),
)
assert response.status_code == 201
assert response.json is not None
assert response.json["key"] == post_body["key"]

View File

@ -515,5 +515,6 @@ class TestAuthorizationService(BaseTest):
("/secrets/*", "delete"), ("/secrets/*", "delete"),
("/secrets/*", "read"), ("/secrets/*", "read"),
("/secrets/*", "update"), ("/secrets/*", "update"),
("/service-accounts", "create"),
] ]
) )

View File

@@ -1,22 +1,11 @@
-// @ts-ignore
-import { Content } from '@carbon/react';
-import { BrowserRouter, Routes, Route } from 'react-router-dom';
+import { BrowserRouter } from 'react-router-dom';
import { defineAbility } from '@casl/ability';
import React from 'react';
-import NavigationBar from './components/NavigationBar';
-import HomePageRoutes from './routes/HomePageRoutes';
-import About from './routes/About';
-import ErrorBoundary from './components/ErrorBoundary';
-import AdminRoutes from './routes/AdminRoutes';
import { AbilityContext } from './contexts/Can';
import UserService from './services/UserService';
import APIErrorProvider from './contexts/APIErrorContext';
-import ScrollToTop from './components/ScrollToTop';
-import EditorRoutes from './routes/EditorRoutes';
-import Extension from './routes/Extension';
+import ContainerForExtensions from './ContainerForExtensions';
export default function App() {
if (!UserService.isLoggedIn()) {
@ -26,34 +15,13 @@ export default function App() {
const ability = defineAbility(() => {});
-let contentClassName = 'main-site-body-centered';
-if (window.location.pathname.startsWith('/editor/')) {
-contentClassName = 'no-center-stuff';
-}
return (
<div className="cds--white">
{/* @ts-ignore */}
<AbilityContext.Provider value={ability}>
<APIErrorProvider>
<BrowserRouter>
-<NavigationBar />
+<ContainerForExtensions />
<Content className={contentClassName}>
<ScrollToTop />
<ErrorBoundary>
<Routes>
<Route path="/*" element={<HomePageRoutes />} />
<Route path="/about" element={<About />} />
<Route path="/tasks/*" element={<HomePageRoutes />} />
<Route path="/admin/*" element={<AdminRoutes />} />
<Route path="/editor/*" element={<EditorRoutes />} />
<Route
path="/extensions/:page_identifier"
element={<Extension />}
/>
</Routes>
</ErrorBoundary>
</Content>
</BrowserRouter>
</APIErrorProvider>
</AbilityContext.Provider>

View File

@@ -0,0 +1,108 @@
import { Content } from '@carbon/react';
import { Routes, Route } from 'react-router-dom';
import React, { useEffect, useState } from 'react';
import NavigationBar from './components/NavigationBar';
import HomePageRoutes from './routes/HomePageRoutes';
import About from './routes/About';
import ErrorBoundary from './components/ErrorBoundary';
import AdminRoutes from './routes/AdminRoutes';
import ScrollToTop from './components/ScrollToTop';
import EditorRoutes from './routes/EditorRoutes';
import Extension from './routes/Extension';
import { useUriListForPermissions } from './hooks/UriListForPermissions';
import { PermissionsToCheck, ProcessFile, ProcessModel } from './interfaces';
import { usePermissionFetcher } from './hooks/PermissionService';
import {
ExtensionUiSchema,
UiSchemaUxElement,
} from './extension_ui_schema_interfaces';
import HttpService from './services/HttpService';
export default function ContainerForExtensions() {
const [extensionUxElements, setExtensionNavigationItems] = useState<
UiSchemaUxElement[] | null
>(null);
let contentClassName = 'main-site-body-centered';
if (window.location.pathname.startsWith('/editor/')) {
contentClassName = 'no-center-stuff';
}
const { targetUris } = useUriListForPermissions();
const permissionRequestData: PermissionsToCheck = {
[targetUris.extensionListPath]: ['GET'],
};
const { ability, permissionsLoaded } = usePermissionFetcher(
permissionRequestData
);
// eslint-disable-next-line sonarjs/cognitive-complexity
useEffect(() => {
if (!permissionsLoaded) {
return;
}
const processExtensionResult = (processModels: ProcessModel[]) => {
const eni: UiSchemaUxElement[] = processModels
.map((processModel: ProcessModel) => {
const extensionUiSchemaFile = processModel.files.find(
(file: ProcessFile) => file.name === 'extension_uischema.json'
);
if (extensionUiSchemaFile && extensionUiSchemaFile.file_contents) {
try {
const extensionUiSchema: ExtensionUiSchema = JSON.parse(
extensionUiSchemaFile.file_contents
);
if (extensionUiSchema.ux_elements) {
return extensionUiSchema.ux_elements;
}
} catch (jsonParseError: any) {
console.error(
`Unable to get navigation items for ${processModel.id}`
);
}
}
return [] as UiSchemaUxElement[];
})
.flat();
if (eni) {
setExtensionNavigationItems(eni);
}
};
if (ability.can('GET', targetUris.extensionListPath)) {
HttpService.makeCallToBackend({
path: targetUris.extensionListPath,
successCallback: processExtensionResult,
});
}
}, [targetUris.extensionListPath, permissionsLoaded, ability]);
return (
<>
<NavigationBar extensionUxElements={extensionUxElements} />
<Content className={contentClassName}>
<ScrollToTop />
<ErrorBoundary>
<Routes>
<Route path="/*" element={<HomePageRoutes />} />
<Route path="/about" element={<About />} />
<Route path="/tasks/*" element={<HomePageRoutes />} />
<Route
path="/admin/*"
element={
<AdminRoutes extensionUxElements={extensionUxElements} />
}
/>
<Route path="/editor/*" element={<EditorRoutes />} />
<Route
path="/extensions/:page_identifier"
element={<Extension />}
/>
</Routes>
</ErrorBoundary>
</Content>
</>
);
}

View File

@@ -0,0 +1,30 @@
import { UiSchemaUxElement } from '../extension_ui_schema_interfaces';
type OwnProps = {
displayLocation: string;
elementCallback: Function;
extensionUxElements?: UiSchemaUxElement[] | null;
};
export default function ExtensionUxElementForDisplay({
displayLocation,
elementCallback,
extensionUxElements,
}: OwnProps) {
if (!extensionUxElements) {
return null;
}
const mainElement = () => {
return extensionUxElements.map(
(uxElement: UiSchemaUxElement, index: number) => {
if (uxElement.display_location === displayLocation) {
return elementCallback(uxElement, index);
}
return null;
}
);
};
return <>{mainElement()}</>;
}

View File

@@ -1,11 +1,5 @@
-// @ts-ignore
import { Filter } from '@carbon/icons-react';
-import {
-Button,
-Grid,
-Column,
-// @ts-ignore
-} from '@carbon/react';
+import { Button, Grid, Column } from '@carbon/react';
type OwnProps = {
showFilterOptions: boolean;

View File

@ -24,18 +24,20 @@ import { Can } from '@casl/react';
import logo from '../logo.svg';
import UserService from '../services/UserService';
import { useUriListForPermissions } from '../hooks/UriListForPermissions';
-import { PermissionsToCheck, ProcessModel, ProcessFile } from '../interfaces';
+import { PermissionsToCheck } from '../interfaces';
-import {
-ExtensionUiSchema,
-UiSchemaUxElement,
-} from '../extension_ui_schema_interfaces';
+import { UiSchemaUxElement } from '../extension_ui_schema_interfaces';
import { usePermissionFetcher } from '../hooks/PermissionService';
-import HttpService, { UnauthenticatedError } from '../services/HttpService';
+import { UnauthenticatedError } from '../services/HttpService';
import { DOCUMENTATION_URL, SPIFF_ENVIRONMENT } from '../config';
import appVersionInfo from '../helpers/appVersionInfo';
import { slugifyString } from '../helpers';
+import ExtensionUxElementForDisplay from './ExtensionUxElementForDisplay';
-export default function NavigationBar() {
+type OwnProps = {
+extensionUxElements?: UiSchemaUxElement[] | null;
+};
+export default function NavigationBar({ extensionUxElements }: OwnProps) {
const handleLogout = () => {
UserService.doLogout();
};
@ -46,9 +48,6 @@ export default function NavigationBar() {
const location = useLocation();
const [activeKey, setActiveKey] = useState<string>('');
-const [extensionNavigationItems, setExtensionNavigationItems] = useState<
-UiSchemaUxElement[] | null
->(null);
const { targetUris } = useUriListForPermissions();
@ -65,9 +64,7 @@ export default function NavigationBar() {
[targetUris.processInstanceListForMePath]: ['POST'],
[targetUris.processGroupListPath]: ['GET'],
};
-const { ability, permissionsLoaded } = usePermissionFetcher(
-permissionRequestData
-);
+const { ability } = usePermissionFetcher(permissionRequestData);
// default to readthedocs and let someone specify an environment variable to override:
//
@ -100,48 +97,6 @@ export default function NavigationBar() {
setActiveKey(newActiveKey);
}, [location]);
// eslint-disable-next-line sonarjs/cognitive-complexity
useEffect(() => {
if (!permissionsLoaded) {
return;
}
const processExtensionResult = (processModels: ProcessModel[]) => {
const eni: UiSchemaUxElement[] = processModels
.map((processModel: ProcessModel) => {
const extensionUiSchemaFile = processModel.files.find(
(file: ProcessFile) => file.name === 'extension_uischema.json'
);
if (extensionUiSchemaFile && extensionUiSchemaFile.file_contents) {
try {
const extensionUiSchema: ExtensionUiSchema = JSON.parse(
extensionUiSchemaFile.file_contents
);
if (extensionUiSchema.ux_elements) {
return extensionUiSchema.ux_elements;
}
} catch (jsonParseError: any) {
console.error(
`Unable to get navigation items for ${processModel.id}`
);
}
}
return [] as UiSchemaUxElement[];
})
.flat();
if (eni) {
setExtensionNavigationItems(eni);
}
};
if (ability.can('GET', targetUris.extensionListPath)) {
HttpService.makeCallToBackend({
path: targetUris.extensionListPath,
successCallback: processExtensionResult,
});
}
}, [targetUris.extensionListPath, permissionsLoaded, ability]);
const isActivePage = (menuItemPath: string) => { const isActivePage = (menuItemPath: string) => {
return activeKey === menuItemPath; return activeKey === menuItemPath;
}; };
@ -155,22 +110,6 @@ export default function NavigationBar() {
const userEmail = UserService.getUserEmail(); const userEmail = UserService.getUserEmail();
const username = UserService.getPreferredUsername(); const username = UserService.getPreferredUsername();
const extensionNavigationElementsForDisplayLocation = (
displayLocation: string,
elementCallback: Function
) => {
if (!extensionNavigationItems) {
return null;
}
return extensionNavigationItems.map((uxElement: UiSchemaUxElement) => {
if (uxElement.display_location === displayLocation) {
return elementCallback(uxElement);
}
return null;
});
};
const extensionUserProfileElement = (uxElement: UiSchemaUxElement) => { const extensionUserProfileElement = (uxElement: UiSchemaUxElement) => {
const navItemPage = `/extensions${uxElement.page}`; const navItemPage = `/extensions${uxElement.page}`;
return <a href={navItemPage}>{uxElement.label}</a>; return <a href={navItemPage}>{uxElement.label}</a>;
@ -196,10 +135,11 @@ export default function NavigationBar() {
<a target="_blank" href={documentationUrl} rel="noreferrer"> <a target="_blank" href={documentationUrl} rel="noreferrer">
Documentation Documentation
</a> </a>
{extensionNavigationElementsForDisplayLocation( <ExtensionUxElementForDisplay
'user_profile_item', displayLocation="user_profile_item"
extensionUserProfileElement elementCallback={extensionUserProfileElement}
)} extensionUxElements={extensionUxElements}
/>
{!UserService.authenticationDisabled() ? ( {!UserService.authenticationDisabled() ? (
<> <>
<hr /> <hr />
@ -345,10 +285,11 @@ export default function NavigationBar() {
</HeaderMenuItem> </HeaderMenuItem>
</Can> </Can>
{configurationElement()} {configurationElement()}
{extensionNavigationElementsForDisplayLocation( <ExtensionUxElementForDisplay
'header_menu_item', displayLocation="header_menu_item"
extensionHeaderMenuItemElement elementCallback={extensionHeaderMenuItemElement}
)} extensionUxElements={extensionUxElements}
/>
</> </>
); );
}; };
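
The new ExtensionUxElementForDisplay component itself is not part of this diff. Judging from the helper it replaces above and the props used at its call sites (displayLocation, elementCallback, extensionUxElements), a minimal sketch could look like the following; this is an illustration only, not the committed file, and the prop types are assumptions.

import { UiSchemaUxElement } from '../extension_ui_schema_interfaces';

type OwnProps = {
  displayLocation: string;
  elementCallback: Function;
  extensionUxElements?: UiSchemaUxElement[] | null;
};

// Renders the callback's output for every ux_element whose display_location
// matches, mirroring the removed extensionNavigationElementsForDisplayLocation
// helper. Hypothetical sketch of what the real component might contain.
export default function ExtensionUxElementForDisplay({
  displayLocation,
  elementCallback,
  extensionUxElements,
}: OwnProps) {
  if (!extensionUxElements) {
    return null;
  }
  const elements = extensionUxElements.map(
    (uxElement: UiSchemaUxElement, index: number) => {
      if (uxElement.display_location !== displayLocation) {
        return null;
      }
      // the index lets callers like the configuration tabs compute positions
      return elementCallback(uxElement, index);
    }
  );
  return <>{elements}</>;
}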

View File

@@ -8,30 +8,47 @@ export enum UiSchemaPersistenceLevel {
   none = 'none',
 }
 
+export interface UiSchemaLocationSpecificConfig {
+  highlight_on_tabs?: string[];
+}
+
 export interface UiSchemaUxElement {
   label: string;
   page: string;
   display_location: UiSchemaDisplayLocation;
+  location_specific_configs?: UiSchemaLocationSpecificConfig;
+}
+
+export interface UiSchemaForm {
+  form_schema_filename: any;
+  form_ui_schema_filename: any;
+  form_submit_button_label?: string;
 }
 
 export interface UiSchemaAction {
   api_path: string;
-  persistence_level?: UiSchemaPersistenceLevel;
   navigate_to_on_form_submit?: string;
+  persistence_level?: UiSchemaPersistenceLevel;
+  process_id_to_run?: string;
   results_markdown_filename?: string;
+  search_params_to_inject?: string[];
+  full_api_path?: boolean;
 }
 
 export interface UiSchemaPageDefinition {
   header: string;
   api: string;
-  on_load?: UiSchemaAction;
-  on_form_submit?: UiSchemaAction;
-  form_schema_filename?: any;
-  form_ui_schema_filename?: any;
+  form?: UiSchemaForm;
   markdown_instruction_filename?: string;
+  navigate_instead_of_post_to_api?: boolean;
   navigate_to_on_form_submit?: string;
+  on_form_submit?: UiSchemaAction;
+  on_load?: UiSchemaAction;
+  open_links_in_new_tab?: boolean;
 }
 
 export interface UiSchemaPage {
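
Taken together, these interfaces describe the shape of an extension's extension_uischema.json. A hypothetical file exercising the new fields might look roughly like the object below; every name, path, and filename is made up for illustration, and how the pages map is keyed is an assumption not shown in this commit.

// Hypothetical extension_uischema.json contents, shown as a TypeScript object.
const exampleExtensionUiSchema = {
  ux_elements: [
    {
      label: 'My Reports',
      page: '/my-reports',
      display_location: 'configuration_tab_item',
      location_specific_configs: {
        // keep this tab highlighted when either page is active
        highlight_on_tabs: ['/my-reports', '/my-reports/detail'],
      },
    },
  ],
  pages: {
    // assumed to be keyed by page path; the keying is not shown in this diff
    '/my-reports': {
      header: 'My Reports',
      api: 'my-reports',
      form: {
        form_schema_filename: 'report-schema.json',
        form_ui_schema_filename: 'report-uischema.json',
        form_submit_button_label: 'Run report',
      },
      on_load: {
        api_path: 'my-reports/list',
        // copy ?report_id=... from the URL into extension_input on load
        search_params_to_inject: ['report_id'],
      },
      on_form_submit: {
        api_path: 'my-reports/run',
        results_markdown_filename: 'results.md',
      },
      open_links_in_new_tab: false,
    },
  },
};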

View File

@@ -23,8 +23,13 @@ import ProcessInterstitialPage from './ProcessInterstitialPage';
 import MessageListPage from './MessageListPage';
 import DataStorePage from './DataStorePage';
 import ErrorDisplay from '../components/ErrorDisplay';
+import { UiSchemaUxElement } from '../extension_ui_schema_interfaces';
 
-export default function AdminRoutes() {
+type OwnProps = {
+  extensionUxElements?: UiSchemaUxElement[] | null;
+};
+
+export default function AdminRoutes({ extensionUxElements }: OwnProps) {
   const location = useLocation();
 
   useEffect(() => {}, [location]);
@@ -118,7 +123,12 @@ export default function AdminRoutes() {
         path="process-instances/all"
         element={<ProcessInstanceList variant="all" />}
       />
-      <Route path="configuration/*" element={<Configuration />} />
+      <Route
+        path="configuration/*"
+        element={
+          <Configuration extensionUxElements={extensionUxElements} />
+        }
+      />
       <Route
         path="process-models/:process_model_id/form-builder"
         element={<JsonSchemaFormBuilder />}
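
Both NavigationBar and AdminRoutes now receive extensionUxElements as a prop, so some shared parent (not included in this excerpt) presumably performs the fetch that was removed from NavigationBar and hands the result down. A rough sketch of that pattern follows; the component name, import paths, and wiring are assumptions, and the permission check from the removed code is omitted for brevity.

import { useEffect, useState } from 'react';
import HttpService from '../services/HttpService';
import { useUriListForPermissions } from '../hooks/UriListForPermissions';
import { ProcessFile, ProcessModel } from '../interfaces';
import {
  ExtensionUiSchema,
  UiSchemaUxElement,
} from '../extension_ui_schema_interfaces';
import NavigationBar from '../components/NavigationBar';
import AdminRoutes from './AdminRoutes';

// Hypothetical parent: fetches ux_elements once (the logic removed from
// NavigationBar) and passes them to the children that now take them as props.
export default function ExtensionAwareContainer() {
  const [extensionUxElements, setExtensionUxElements] = useState<
    UiSchemaUxElement[] | null
  >(null);
  const { targetUris } = useUriListForPermissions();

  useEffect(() => {
    const processExtensionResult = (processModels: ProcessModel[]) => {
      const elements = processModels
        .map((processModel: ProcessModel) => {
          const file = processModel.files.find(
            (f: ProcessFile) => f.name === 'extension_uischema.json'
          );
          if (file && file.file_contents) {
            try {
              const uiSchema: ExtensionUiSchema = JSON.parse(
                file.file_contents
              );
              return uiSchema.ux_elements || [];
            } catch {
              return [];
            }
          }
          return [];
        })
        .flat();
      setExtensionUxElements(elements);
    };
    HttpService.makeCallToBackend({
      path: targetUris.extensionListPath,
      successCallback: processExtensionResult,
    });
  }, [targetUris.extensionListPath]);

  return (
    <>
      <NavigationBar extensionUxElements={extensionUxElements} />
      <AdminRoutes extensionUxElements={extensionUxElements} />
    </>
  );
}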

View File

@@ -12,8 +12,15 @@ import { useUriListForPermissions } from '../hooks/UriListForPermissions';
 import { PermissionsToCheck } from '../interfaces';
 import { usePermissionFetcher } from '../hooks/PermissionService';
 import { setPageTitle } from '../helpers';
+import { UiSchemaUxElement } from '../extension_ui_schema_interfaces';
+import ExtensionUxElementForDisplay from '../components/ExtensionUxElementForDisplay';
+import Extension from './Extension';
 
-export default function Configuration() {
+type OwnProps = {
+  extensionUxElements?: UiSchemaUxElement[] | null;
+};
+
+export default function Configuration({ extensionUxElements }: OwnProps) {
   const location = useLocation();
   const { removeError } = useAPIError();
   const [selectedTabIndex, setSelectedTabIndex] = useState<number>(0);
@@ -38,6 +45,29 @@ export default function Configuration() {
     setSelectedTabIndex(newSelectedTabIndex);
   }, [location, removeError]);
 
+  const configurationExtensionTab = (
+    uxElement: UiSchemaUxElement,
+    uxElementIndex: number
+  ) => {
+    const navItemPage = `/admin/configuration/extension${uxElement.page}`;
+    let pagesToCheck = [uxElement.page];
+    if (
+      uxElement.location_specific_configs &&
+      uxElement.location_specific_configs.highlight_on_tabs
+    ) {
+      pagesToCheck = uxElement.location_specific_configs.highlight_on_tabs;
+    }
+    pagesToCheck.forEach((pageToCheck: string) => {
+      const pageToCheckNavItem = `/admin/configuration/extension${pageToCheck}`;
+      if (pageToCheckNavItem === location.pathname) {
+        setSelectedTabIndex(uxElementIndex + 2);
+      }
+    });
+    return <Tab onClick={() => navigate(navItemPage)}>{uxElement.label}</Tab>;
+  };
+
   // wow, if you do not check to see if the permissions are loaded, then in safari,
   // you will get {null} inside the <TabList> which totally explodes carbon (in safari!).
   // we *think* that null inside a TabList works fine in all other browsers.
@@ -61,6 +91,11 @@ export default function Configuration() {
             Authentications
           </Tab>
         </Can>
+        <ExtensionUxElementForDisplay
+          displayLocation="configuration_tab_item"
+          elementCallback={configurationExtensionTab}
+          extensionUxElements={extensionUxElements}
+        />
       </TabList>
     </Tabs>
     <br />
@@ -70,6 +105,7 @@ export default function Configuration() {
       <Route path="secrets/new" element={<SecretNew />} />
       <Route path="secrets/:key" element={<SecretShow />} />
       <Route path="authentications" element={<AuthenticationList />} />
+      <Route path="extension/:page_identifier" element={<Extension />} />;
     </Routes>
   </>
 );

View File

@@ -1,6 +1,7 @@
 import { useCallback, useEffect, useState } from 'react';
+import { Button } from '@carbon/react';
 import MDEditor from '@uiw/react-md-editor';
-import { useParams } from 'react-router-dom';
+import { useParams, useSearchParams } from 'react-router-dom';
 import { Editor } from '@monaco-editor/react';
 import { useUriListForPermissions } from '../hooks/UriListForPermissions';
 import { ProcessFile, ProcessModel } from '../interfaces';
@@ -20,6 +21,7 @@ import ErrorDisplay from '../components/ErrorDisplay';
 export default function Extension() {
   const { targetUris } = useUriListForPermissions();
   const params = useParams();
+  const [searchParams] = useSearchParams();
 
   const [_processModel, setProcessModel] = useState<ProcessModel | null>(null);
   const [formData, setFormData] = useState<any>(null);
@@ -40,6 +42,7 @@ export default function Extension() {
   const { addError, removeError } = useAPIError();
 
   const setConfigsIfDesiredSchemaFile = useCallback(
+    // eslint-disable-next-line sonarjs/cognitive-complexity
     (extensionUiSchemaFile: ProcessFile | null, pm: ProcessModel) => {
       const processLoadResult = (result: any) => {
         setFormData(result.task_data);
@@ -64,10 +67,23 @@ export default function Extension() {
         const pageDefinition = extensionUiSchema.pages[pageIdentifier];
         setUiSchemaPageDefinition(pageDefinition);
         setProcessModel(pm);
+        pm.files.forEach((file: ProcessFile) => {
+          filesByName[file.name] = file;
+        });
 
-        const postBody: ExtensionPostBody = { extension_input: {} };
-        postBody.ui_schema_action = pageDefinition.on_load;
         if (pageDefinition.on_load) {
+          const postBody: ExtensionPostBody = { extension_input: {} };
+          if (pageDefinition.on_load.search_params_to_inject) {
+            pageDefinition.on_load.search_params_to_inject.forEach(
+              (searchParam: string) => {
+                if (searchParams.get(searchParam) !== undefined) {
+                  postBody.extension_input[searchParam] =
+                    searchParams.get(searchParam);
+                }
+              }
+            );
+          }
+          postBody.ui_schema_action = pageDefinition.on_load;
           HttpService.makeCallToBackend({
             path: `${targetUris.extensionListPath}/${pageDefinition.on_load.api_path}`,
             successCallback: processLoadResult,
@@ -78,7 +94,12 @@ export default function Extension() {
         }
       }
     },
-    [targetUris.extensionListPath, params]
+    [
+      targetUris.extensionListPath,
+      params.page_identifier,
+      searchParams,
+      filesByName,
+    ]
   );
 
   useEffect(() => {
@@ -86,7 +107,6 @@ export default function Extension() {
     processModels.forEach((pm: ProcessModel) => {
       let extensionUiSchemaFile: ProcessFile | null = null;
       pm.files.forEach((file: ProcessFile) => {
-        filesByName[file.name] = file;
        if (file.name === 'extension_uischema.json') {
          extensionUiSchemaFile = file;
        }
@@ -100,21 +120,55 @@ export default function Extension() {
      successCallback: processExtensionResult,
    });
  }, [
-    filesByName,
-    params,
    setConfigsIfDesiredSchemaFile,
    targetUris.extensionListPath,
    targetUris.extensionPath,
  ]);
 
-  const processSubmitResult = (result: any) => {
-    setProcessedTaskData(result.task_data);
-    if (result.rendered_results_markdown) {
-      setMarkdownToRenderOnSubmit(result.rendered_results_markdown);
-    }
-    setFormButtonsDisabled(false);
-  };
+  const interpolateNavigationString = (
+    navigationString: string,
+    baseData: any
+  ) => {
+    let isValid = true;
+    const data = { backend_base_url: BACKEND_BASE_URL, ...baseData };
+    const optionString = navigationString.replace(/{(\w+)}/g, (_, k) => {
+      const value = data[k];
+      if (value === undefined) {
+        isValid = false;
+        addError({
+          message: `Could not find a value for ${k} in form data.`,
+        });
+      }
+      return value;
+    });
+    if (!isValid) {
+      return null;
+    }
+    return optionString;
+  };
+
+  const processSubmitResult = (result: any) => {
+    if (
+      uiSchemaPageDefinition &&
+      uiSchemaPageDefinition.navigate_to_on_form_submit
+    ) {
+      const optionString = interpolateNavigationString(
+        uiSchemaPageDefinition.navigate_to_on_form_submit,
+        result.task_data
+      );
+      if (optionString !== null) {
+        window.location.href = optionString;
+      }
+    } else {
+      setProcessedTaskData(result.task_data);
+      if (result.rendered_results_markdown) {
+        setMarkdownToRenderOnSubmit(result.rendered_results_markdown);
+      }
+      setFormButtonsDisabled(false);
+    }
+  };
 
+  // eslint-disable-next-line sonarjs/cognitive-complexity
  const handleFormSubmit = (formObject: any, _event: any) => {
    if (formButtonsDisabled) {
      return;
@@ -129,34 +183,29 @@ export default function Extension() {
    if (
      uiSchemaPageDefinition &&
-      uiSchemaPageDefinition.navigate_to_on_form_submit
+      uiSchemaPageDefinition.navigate_instead_of_post_to_api
    ) {
-      let isValid = true;
-      const optionString =
-        uiSchemaPageDefinition.navigate_to_on_form_submit.replace(
-          /{(\w+)}/g,
-          (_, k) => {
-            const value = dataToSubmit[k];
-            if (value === undefined) {
-              isValid = false;
-              addError({
-                message: `Could not find a value for ${k} in form data.`,
-              });
-            }
-            return value;
-          }
-        );
-      if (!isValid) {
-        return;
-      }
-      const url = `${BACKEND_BASE_URL}/extensions-get-data/${params.page_identifier}/${optionString}`;
-      window.location.href = url;
-      setFormButtonsDisabled(false);
+      let optionString: string | null = '';
+      if (uiSchemaPageDefinition.navigate_to_on_form_submit) {
+        optionString = interpolateNavigationString(
+          uiSchemaPageDefinition.navigate_to_on_form_submit,
+          dataToSubmit
+        );
+        if (optionString !== null) {
+          window.location.href = optionString;
+          setFormButtonsDisabled(false);
+        }
+      }
    } else {
-      const postBody: ExtensionPostBody = { extension_input: dataToSubmit };
+      let postBody: ExtensionPostBody = { extension_input: dataToSubmit };
      let apiPath = targetUris.extensionPath;
      if (uiSchemaPageDefinition && uiSchemaPageDefinition.on_form_submit) {
-        apiPath = `${targetUris.extensionListPath}/${uiSchemaPageDefinition.on_form_submit.api_path}`;
+        if (uiSchemaPageDefinition.on_form_submit.full_api_path) {
+          apiPath = `/${uiSchemaPageDefinition.on_form_submit.api_path}`;
+          postBody = dataToSubmit;
+        } else {
+          apiPath = `${targetUris.extensionListPath}/${uiSchemaPageDefinition.on_form_submit.api_path}`;
+        }
        postBody.ui_schema_action = uiSchemaPageDefinition.on_form_submit;
      }
    }
@@ -193,22 +242,29 @@ export default function Extension() {
      markdownContentsToRender.push(markdownToRenderOnLoad);
    }
 
+    let mdEditorLinkTarget: string | undefined = '_blank';
+    if (uiSchemaPageDefinition.open_links_in_new_tab === false) {
+      mdEditorLinkTarget = undefined;
+    }
    if (markdownContentsToRender.length > 0) {
      componentsToDisplay.push(
        <div data-color-mode="light" className="with-bottom-margin">
          <MDEditor.Markdown
-            linkTarget="_blank"
+            linkTarget={mdEditorLinkTarget}
            source={markdownContentsToRender.join('\n')}
          />
        </div>
      );
    }
 
-    if (uiSchemaPageDefinition.form_schema_filename) {
-      const formSchemaFile =
-        filesByName[uiSchemaPageDefinition.form_schema_filename];
+    const uiSchemaForm = uiSchemaPageDefinition.form;
+    if (uiSchemaForm) {
+      const formSchemaFile = filesByName[uiSchemaForm.form_schema_filename];
      const formUiSchemaFile =
-        filesByName[uiSchemaPageDefinition.form_ui_schema_filename];
+        filesByName[uiSchemaForm.form_ui_schema_filename];
+      const submitButtonText =
+        uiSchemaForm.form_submit_button_label || 'Submit';
      if (formSchemaFile.file_contents && formUiSchemaFile.file_contents) {
        componentsToDisplay.push(
          <CustomForm
@@ -221,7 +277,15 @@ export default function Extension() {
            onSubmit={handleFormSubmit}
            schema={JSON.parse(formSchemaFile.file_contents)}
            uiSchema={JSON.parse(formUiSchemaFile.file_contents)}
-          />
+          >
+            <Button
+              type="submit"
+              id="submit-button"
+              disabled={formButtonsDisabled}
+            >
+              {submitButtonText}
+            </Button>
+          </CustomForm>
        );
      }
    }
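
For reference, the {placeholder} interpolation introduced above substitutes keys from the submitted or returned task data plus backend_base_url into navigate_to_on_form_submit. The core substitution reduces to a few lines; this is a standalone, simplified sketch (error reporting via addError omitted, URL and field names invented for the example).

// Standalone illustration of the template substitution performed by
// interpolateNavigationString.
const interpolate = (template: string, data: Record<string, any>): string =>
  template.replace(/{(\w+)}/g, (_, key) => String(data[key]));

// Example: a page configured with
//   navigate_to_on_form_submit: '{backend_base_url}/v1.0/reports/{report_id}'
// and task data { report_id: 42 } would navigate to
//   https://backend.example.com/v1.0/reports/42
const target = interpolate('{backend_base_url}/v1.0/reports/{report_id}', {
  backend_base_url: 'https://backend.example.com',
  report_id: 42,
});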