diff --git a/spiffworkflow-backend/migrations/versions/9d5b6c5c31a5_.py b/spiffworkflow-backend/migrations/versions/9d5b6c5c31a5_.py new file mode 100644 index 00000000..75507b4a --- /dev/null +++ b/spiffworkflow-backend/migrations/versions/9d5b6c5c31a5_.py @@ -0,0 +1,52 @@ +"""empty message + +Revision ID: 9d5b6c5c31a5 +Revises: 55bbdeb6b635 +Create Date: 2023-09-14 08:49:53.619192 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '9d5b6c5c31a5' +down_revision = '55bbdeb6b635' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('service_account', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('created_by_user_id', sa.Integer(), nullable=False), + sa.Column('api_key_hash', sa.String(length=255), nullable=False), + sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True), + sa.Column('created_at_in_seconds', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['created_by_user_id'], ['user.id'], ), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('name', 'created_by_user_id', name='service_account_uniq') + ) + with op.batch_alter_table('service_account', schema=None) as batch_op: + batch_op.create_index(batch_op.f('ix_service_account_api_key_hash'), ['api_key_hash'], unique=True) + batch_op.create_index(batch_op.f('ix_service_account_created_by_user_id'), ['created_by_user_id'], unique=False) + batch_op.create_index(batch_op.f('ix_service_account_name'), ['name'], unique=False) + batch_op.create_index(batch_op.f('ix_service_account_user_id'), ['user_id'], unique=False) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table('service_account', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_service_account_user_id')) + batch_op.drop_index(batch_op.f('ix_service_account_name')) + batch_op.drop_index(batch_op.f('ix_service_account_created_by_user_id')) + batch_op.drop_index(batch_op.f('ix_service_account_api_key_hash')) + + op.drop_table('service_account') + # ### end Alembic commands ### diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py b/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py index 298e4a18..12b5dfec 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py @@ -25,7 +25,7 @@ from spiffworkflow_backend.helpers.api_version import V1_API_PATH_PREFIX from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.db import migrate from spiffworkflow_backend.routes.openid_blueprint.openid_blueprint import openid_blueprint -from spiffworkflow_backend.routes.user import set_new_access_token_in_cookie +from spiffworkflow_backend.routes.user import _set_new_access_token_in_cookie from spiffworkflow_backend.routes.user import verify_token from spiffworkflow_backend.routes.user_blueprint import user_blueprint from spiffworkflow_backend.services.authorization_service import AuthorizationService @@ -182,7 +182,7 @@ def create_app() -> flask.app.Flask: app.before_request(verify_token) app.before_request(AuthorizationService.check_for_permission) - app.after_request(set_new_access_token_in_cookie) + app.after_request(_set_new_access_token_in_cookie) # The default is true, but we want to preserve the order of keys in the json # This is particularly helpful for forms that are generated from json schemas. diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml index 5eed2443..23939bbb 100755 --- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml @@ -3457,3 +3457,14 @@ components: fiilterable: type: string nullable: false + + ServiceAccountRequest: + properties: + name: + type: string + nullable: false + ServiceAccountApiKey: + properties: + api_key: + type: string + nullable: false diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/load_database_models.py b/spiffworkflow-backend/src/spiffworkflow_backend/load_database_models.py index a4f85d85..dcf71245 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/load_database_models.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/load_database_models.py @@ -94,5 +94,8 @@ from spiffworkflow_backend.models.configuration import ( from spiffworkflow_backend.models.user_property import ( UserPropertyModel, ) # noqa: F401 +from spiffworkflow_backend.models.service_account import ( + ServiceAccountModel, +) # noqa: F401 add_listeners() diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/db.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/db.py index c7e1862a..3b6f8837 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/db.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/db.py @@ -14,6 +14,11 @@ db = SQLAlchemy() migrate = Migrate() +# NOTE: ensure all db models are added to src/spiffworkflow_backend/load_database_models.py so that: +# 1) they will be loaded in time for add_listeners. 
otherwise they may not auto-update created_at and updated_at times +# 2) database migration code picks them up when migrations are automatically generated + + class SpiffworkflowBaseDBModel(db.Model):  # type: ignore     __abstract__ = True diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/service_account.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/service_account.py new file mode 100644 index 00000000..3acf96e9 --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/service_account.py @@ -0,0 +1,55 @@ +from __future__ import annotations + +import time +import uuid +from dataclasses import dataclass +from hashlib import sha256 + +from sqlalchemy import ForeignKey +from sqlalchemy.orm import relationship + +from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel +from spiffworkflow_backend.models.db import db + +# this is designed to be used for the "service" column on the user table, which holds +# information about which authentication system is used to authenticate this user. +# in this case, we are authenticating based on X-API-KEY which correlates to a known value in the spiff db. +SPIFF_SERVICE_ACCOUNT_AUTH_SERVICE = "spiff_service_account" +SPIFF_SERVICE_ACCOUNT_AUTH_SERVICE_ID_PREFIX = "service_account_" + + +@dataclass +class ServiceAccountModel(SpiffworkflowBaseDBModel): +    __tablename__ = "service_account" +    __allow_unmapped__ = True +    __table_args__ = (db.UniqueConstraint("name", "created_by_user_id", name="service_account_uniq"),) + +    id: int = db.Column(db.Integer, primary_key=True) +    name: str = db.Column(db.String(255), nullable=False, unique=False, index=True) +    user_id: int = db.Column(ForeignKey("user.id"), nullable=False, index=True) +    created_by_user_id: int = db.Column(ForeignKey("user.id"), nullable=False, index=True) + +    api_key_hash: str = db.Column(db.String(255), nullable=False, unique=True, index=True) + +    user = relationship("UserModel", uselist=False, cascade="delete", foreign_keys=[user_id])  # type: ignore + +    updated_at_in_seconds: int = db.Column(db.Integer) +    created_at_in_seconds: int = db.Column(db.Integer) + +    # only to be used when the service account is first created, to tell the user what the key is +    api_key: str | None = None + +    @classmethod +    def generate_api_key(cls) -> str: +        return str(uuid.uuid4()) + +    @classmethod +    def hash_api_key(cls, unencrypted_api_key: str) -> str: +        return sha256(unencrypted_api_key.encode("utf8")).hexdigest() + +    @classmethod +    def generate_username_for_related_user(cls, service_account_name: str, created_by_user_id: int) -> str: +        # add fuzz to username so a user can delete and recreate an api_key with the same name +        # also make the username readable so we know where it came from even after the service account is deleted +        creation_time_for_fuzz = time.time() +        return f"{service_account_name}_{created_by_user_id}_{creation_time_for_fuzz}" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py index 1f3f3f65..a62353d8 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py @@ -4,9 +4,7 @@ from dataclasses import dataclass from typing import Any import jwt -import marshmallow from flask import current_app -from marshmallow import Schema from sqlalchemy.orm import relationship from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel @@ -81,14 +79,3 @@ class 
UserModel(SpiffworkflowBaseDBModel): user_as_json_string = current_app.json.dumps(self) user_dict: dict[str, Any] = current_app.json.loads(user_as_json_string) return user_dict - - -class UserModelSchema(Schema): - class Meta: - model = UserModel - # load_instance = True - # include_relationships = False - # exclude = ("UserGroupAssignment",) - - id = marshmallow.fields.String(required=True) - username = marshmallow.fields.String(required=True) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/extensions_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/extensions_controller.py index 749d59a9..21a94d5d 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/extensions_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/extensions_controller.py @@ -101,10 +101,11 @@ def _run_extension( process_model = _get_process_model(process_model_identifier) except ApiError as ex: if ex.error_code == "process_model_cannot_be_found": + # if process_model_identifier.startswith(current_app.config["SPIFFWORKFLOW_BACKEND_EXTENSIONS_PROCESS_MODEL_PREFIX"]) raise ApiError( error_code="invalid_process_model_extension", message=( - f"Process Model '{process_model_identifier}' cannot be run as an extension. It must be in the" + f"Process Model '{process_model_identifier}' could not be found as an extension. It must be in the" " correct Process Group:" f" {current_app.config['SPIFFWORKFLOW_BACKEND_EXTENSIONS_PROCESS_MODEL_PREFIX']}" ), @@ -124,9 +125,11 @@ def _run_extension( ui_schema_action = None persistence_level = "none" + process_id_to_run = None if body and "ui_schema_action" in body: ui_schema_action = body["ui_schema_action"] persistence_level = ui_schema_action.get("persistence_level", "none") + process_id_to_run = ui_schema_action.get("process_id_to_run", None) process_instance = None if persistence_level == "none": @@ -145,7 +148,9 @@ def _run_extension( processor = None try: processor = ProcessInstanceProcessor( - process_instance, script_engine=CustomBpmnScriptEngine(use_restricted_script_engine=False) + process_instance, + script_engine=CustomBpmnScriptEngine(use_restricted_script_engine=False), + process_id_to_run=process_id_to_run, ) if body and "extension_input" in body: processor.do_engine_steps(save=False, execution_strategy_name="run_current_ready_tasks") @@ -166,12 +171,13 @@ def _run_extension( # we need to recurse through all last tasks if the last task is a call activity or subprocess. if processor is not None: task = processor.bpmn_process_instance.last_task - raise ApiError.from_task( - error_code="unknown_exception", - message=f"An unknown error occurred. Original error: {e}", - status_code=400, - task=task, - ) from e + if task is not None: + raise ApiError.from_task( + error_code="unknown_exception", + message=f"An unknown error occurred. 
Original error: {e}", + status_code=400, + task=task, + ) from e raise e task_data = {} diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py index f421b653..d70bf854 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py @@ -18,6 +18,7 @@ from spiffworkflow_backend.exceptions.api_error import ApiError from spiffworkflow_backend.helpers.api_version import V1_API_PATH_PREFIX from spiffworkflow_backend.models.group import SPIFF_GUEST_GROUP from spiffworkflow_backend.models.group import SPIFF_NO_AUTH_GROUP +from spiffworkflow_backend.models.service_account import ServiceAccountModel from spiffworkflow_backend.models.task import TaskModel # noqa: F401 from spiffworkflow_backend.models.user import SPIFF_GUEST_USER from spiffworkflow_backend.models.user import SPIFF_NO_AUTH_USER @@ -51,155 +52,35 @@ def verify_token(token: str | None = None, force_run: bool | None = False) -> No ApiError: If not on production and token is not valid, returns an 'invalid_token' 403 error. If on production and user is not authenticated, returns a 'no_user' 403 error. """ - user_info = None if not force_run and AuthorizationService.should_disable_auth_for_request(): return None - if not token and "Authorization" in request.headers: - token = request.headers["Authorization"].removeprefix("Bearer ") - - if not token and "access_token" in request.cookies: - if request.path.startswith(f"{V1_API_PATH_PREFIX}/process-data-file-download/") or request.path.startswith( - f"{V1_API_PATH_PREFIX}/extensions-get-data/" - ): - token = request.cookies["access_token"] + token_info = _find_token_from_headers(token) # This should never be set here but just in case _clear_auth_tokens_from_thread_local_data() - if token: - user_model = None - decoded_token = get_decoded_token(token) + user_model = None + if token_info["token"] is not None: + # import pdb; pdb.set_trace() + user_model = _get_user_model_from_token(token_info["token"]) + elif token_info["api_key"] is not None: + user_model = _get_user_model_from_api_key(token_info["api_key"]) - if decoded_token is not None: - if "token_type" in decoded_token: - token_type = decoded_token["token_type"] - if token_type == "internal": # noqa: S105 - try: - user_model = get_user_from_decoded_internal_token(decoded_token) - except Exception as e: - current_app.logger.error( - f"Exception in verify_token getting user from decoded internal token. 
{e}" - ) + if user_model: + g.user = user_model - # if the user is forced logged out then stop processing the token - if _force_logout_user_if_necessary(user_model): - return None - - elif "iss" in decoded_token.keys(): - user_info = None - try: - if AuthenticationService.validate_id_or_access_token(token): - user_info = decoded_token - except TokenExpiredError as token_expired_error: - # Try to refresh the token - user = UserService.get_user_by_service_and_service_id(decoded_token["iss"], decoded_token["sub"]) - if user: - refresh_token = AuthenticationService.get_refresh_token(user.id) - if refresh_token: - auth_token: dict = AuthenticationService.get_auth_token_from_refresh_token(refresh_token) - if auth_token and "error" not in auth_token and "id_token" in auth_token: - tld = current_app.config["THREAD_LOCAL_DATA"] - tld.new_access_token = auth_token["id_token"] - tld.new_id_token = auth_token["id_token"] - # We have the user, but this code is a bit convoluted, and will later demand - # a user_info object so it can look up the user. Sorry to leave this crap here. - user_info = { - "sub": user.service_id, - "iss": user.service, - } - - if user_info is None: - raise ApiError( - error_code="invalid_token", - message="Your token is expired. Please Login", - status_code=401, - ) from token_expired_error - - except Exception as e: - raise ApiError( - error_code="fail_get_user_info", - message="Cannot get user info from token", - status_code=401, - ) from e - if ( - user_info is not None and "error" not in user_info and "iss" in user_info - ): # not sure what to test yet - user_model = ( - UserModel.query.filter(UserModel.service == user_info["iss"]) - .filter(UserModel.service_id == user_info["sub"]) - .first() - ) - if user_model is None: - raise ApiError( - error_code="invalid_user", - message="Invalid user. Please log in.", - status_code=401, - ) - # no user_info - else: - raise ApiError( - error_code="no_user_info", - message="Cannot retrieve user info", - status_code=401, - ) - - else: - current_app.logger.debug("token_type not in decode_token in verify_token") - raise ApiError( - error_code="invalid_token", - message="Invalid token. Please log in.", - status_code=401, - ) - - if user_model: - g.user = user_model - - # If the user is valid, store the token for this session - if g.user: + # If the user is valid, store the token for this session + if g.user: + if token_info["token"]: # This is an id token, so we don't have a refresh token yet - g.token = token - get_scope(token) - return None - else: - raise ApiError(error_code="no_user_id", message="Cannot get a user id") + g.token = token_info["token"] + get_scope(token_info["token"]) + return None raise ApiError(error_code="invalid_token", message="Cannot validate token.", status_code=401) -def set_new_access_token_in_cookie( - response: flask.wrappers.Response, -) -> flask.wrappers.Response: - """Checks if a new token has been set in THREAD_LOCAL_DATA and sets cookies if appropriate. - - It will also delete the cookies if the user has logged out. 
- """ - tld = current_app.config["THREAD_LOCAL_DATA"] - domain_for_frontend_cookie: str | None = re.sub( - r"^https?:\/\/", - "", - current_app.config["SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND"], - ) - if domain_for_frontend_cookie and domain_for_frontend_cookie.startswith("localhost"): - domain_for_frontend_cookie = None - - # fixme - we should not be passing the access token back to the client - if hasattr(tld, "new_access_token") and tld.new_access_token: - response.set_cookie("access_token", tld.new_access_token, domain=domain_for_frontend_cookie) - - # id_token is required for logging out since this gets passed back to the openid server - if hasattr(tld, "new_id_token") and tld.new_id_token: - response.set_cookie("id_token", tld.new_id_token, domain=domain_for_frontend_cookie) - - if hasattr(tld, "user_has_logged_out") and tld.user_has_logged_out: - response.set_cookie("id_token", "", max_age=0, domain=domain_for_frontend_cookie) - response.set_cookie("access_token", "", max_age=0, domain=domain_for_frontend_cookie) - - _clear_auth_tokens_from_thread_local_data() - - return response - - def login(redirect_url: str = "/", process_instance_id: int | None = None, task_guid: str | None = None) -> Response: if current_app.config.get("SPIFFWORKFLOW_BACKEND_AUTHENTICATION_DISABLED"): AuthorizationService.create_guest_token( @@ -223,25 +104,13 @@ def login(redirect_url: str = "/", process_instance_id: int | None = None, task_ return redirect(login_redirect_url) -def parse_id_token(token: str) -> Any: - """Parse the id token.""" - parts = token.split(".") - if len(parts) != 3: - raise Exception("Incorrect id token format") - - payload = parts[1] - padded = payload + "=" * (4 - len(payload) % 4) - decoded = base64.b64decode(padded) - return json.loads(decoded) - - def login_return(code: str, state: str, session_state: str = "") -> Response | None: state_dict = ast.literal_eval(base64.b64decode(state).decode("utf-8")) state_redirect_url = state_dict["redirect_url"] auth_token_object = AuthenticationService().get_auth_token_object(code) if "id_token" in auth_token_object: id_token = auth_token_object["id_token"] - user_info = parse_id_token(id_token) + user_info = _parse_id_token(id_token) if AuthenticationService.validate_id_or_access_token(id_token): if user_info and "error" not in user_info: @@ -273,7 +142,7 @@ def login_return(code: str, state: str, session_state: str = "") -> Response | N # FIXME: share more code with login_return and maybe attempt to get a refresh token def login_with_access_token(access_token: str) -> Response: - user_info = parse_id_token(access_token) + user_info = _parse_id_token(access_token) if AuthenticationService.validate_id_or_access_token(access_token): if user_info and "error" not in user_info: @@ -320,22 +189,6 @@ def logout_return() -> Response: return redirect(f"{frontend_url}/") -def get_decoded_token(token: str) -> dict | None: - try: - decoded_token = jwt.decode(token, options={"verify_signature": False}) - except Exception as e: - raise ApiError(error_code="invalid_token", message="Cannot decode token.") from e - else: - if "token_type" in decoded_token or "iss" in decoded_token: - return decoded_token - else: - current_app.logger.error(f"Unknown token type in get_decoded_token: token: {token}") - raise ApiError( - error_code="unknown_token", - message="Unknown token type in get_decoded_token", - ) - - def get_scope(token: str) -> str: scope = "" decoded_token = jwt.decode(token, options={"verify_signature": False}) @@ -344,18 +197,38 @@ def 
get_scope(token: str) -> str: return scope -def get_user_from_decoded_internal_token(decoded_token: dict) -> UserModel | None: - sub = decoded_token["sub"] - parts = sub.split("::") - service = parts[0].split(":")[1] - service_id = parts[1].split(":")[1] - user: UserModel = ( - UserModel.query.filter(UserModel.service == service).filter(UserModel.service_id == service_id).first() +# this isn't really a private method but it's also not a valid api call so underscoring it +def _set_new_access_token_in_cookie( + response: flask.wrappers.Response, +) -> flask.wrappers.Response: + """Checks if a new token has been set in THREAD_LOCAL_DATA and sets cookies if appropriate. + + It will also delete the cookies if the user has logged out. + """ + tld = current_app.config["THREAD_LOCAL_DATA"] + domain_for_frontend_cookie: str | None = re.sub( + r"^https?:\/\/", + "", + current_app.config["SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND"], ) - if user: - return user - user = UserService.create_user(service_id, service, service_id) - return user + if domain_for_frontend_cookie and domain_for_frontend_cookie.startswith("localhost"): + domain_for_frontend_cookie = None + + # fixme - we should not be passing the access token back to the client + if hasattr(tld, "new_access_token") and tld.new_access_token: + response.set_cookie("access_token", tld.new_access_token, domain=domain_for_frontend_cookie) + + # id_token is required for logging out since this gets passed back to the openid server + if hasattr(tld, "new_id_token") and tld.new_id_token: + response.set_cookie("id_token", tld.new_id_token, domain=domain_for_frontend_cookie) + + if hasattr(tld, "user_has_logged_out") and tld.user_has_logged_out: + response.set_cookie("id_token", "", max_age=0, domain=domain_for_frontend_cookie) + response.set_cookie("access_token", "", max_age=0, domain=domain_for_frontend_cookie) + + _clear_auth_tokens_from_thread_local_data() + + return response def _clear_auth_tokens_from_thread_local_data() -> None: @@ -388,3 +261,158 @@ def _force_logout_user_if_necessary(user_model: UserModel | None = None) -> bool tld.user_has_logged_out = True return True return False + + +def _find_token_from_headers(token: str | None) -> dict[str, str | None]: + api_key = None + if not token and "Authorization" in request.headers: + token = request.headers["Authorization"].removeprefix("Bearer ") + + if not token and "access_token" in request.cookies: + if request.path.startswith(f"{V1_API_PATH_PREFIX}/process-data-file-download/") or request.path.startswith( + f"{V1_API_PATH_PREFIX}/extensions-get-data/" + ): + token = request.cookies["access_token"] + + if not token and "X-API-KEY" in request.headers: + api_key = request.headers["X-API-KEY"] + + token_info = {"token": token, "api_key": api_key} + return token_info + + +def _get_user_model_from_api_key(api_key: str) -> UserModel | None: + api_key_hash = ServiceAccountModel.hash_api_key(api_key) + service_account = ServiceAccountModel.query.filter_by(api_key_hash=api_key_hash).first() + user_model = None + if service_account is not None: + user_model = UserModel.query.filter_by(id=service_account.user_id).first() + return user_model + + +def _get_user_model_from_token(token: str) -> UserModel | None: + user_model = None + decoded_token = _get_decoded_token(token) + + if decoded_token is not None: + if "token_type" in decoded_token: + token_type = decoded_token["token_type"] + if token_type == "internal": # noqa: S105 + try: + user_model = _get_user_from_decoded_internal_token(decoded_token) + except 
Exception as e: + current_app.logger.error( + f"Exception in verify_token getting user from decoded internal token. {e}" + ) + + # if the user is forced logged out then stop processing the token + if _force_logout_user_if_necessary(user_model): + return None + + elif "iss" in decoded_token.keys(): + user_info = None + try: + if AuthenticationService.validate_id_or_access_token(token): + user_info = decoded_token + except TokenExpiredError as token_expired_error: + # Try to refresh the token + user = UserService.get_user_by_service_and_service_id(decoded_token["iss"], decoded_token["sub"]) + if user: + refresh_token = AuthenticationService.get_refresh_token(user.id) + if refresh_token: + auth_token: dict = AuthenticationService.get_auth_token_from_refresh_token(refresh_token) + if auth_token and "error" not in auth_token and "id_token" in auth_token: + tld = current_app.config["THREAD_LOCAL_DATA"] + tld.new_access_token = auth_token["id_token"] + tld.new_id_token = auth_token["id_token"] + # We have the user, but this code is a bit convoluted, and will later demand + # a user_info object so it can look up the user. Sorry to leave this crap here. + user_info = { + "sub": user.service_id, + "iss": user.service, + } + + if user_info is None: + raise ApiError( + error_code="invalid_token", + message="Your token is expired. Please Login", + status_code=401, + ) from token_expired_error + + except Exception as e: + raise ApiError( + error_code="fail_get_user_info", + message="Cannot get user info from token", + status_code=401, + ) from e + if user_info is not None and "error" not in user_info and "iss" in user_info: # not sure what to test yet + user_model = ( + UserModel.query.filter(UserModel.service == user_info["iss"]) + .filter(UserModel.service_id == user_info["sub"]) + .first() + ) + if user_model is None: + raise ApiError( + error_code="invalid_user", + message="Invalid user. Please log in.", + status_code=401, + ) + # no user_info + else: + raise ApiError( + error_code="no_user_info", + message="Cannot retrieve user info", + status_code=401, + ) + + else: + current_app.logger.debug("token_type not in decode_token in verify_token") + raise ApiError( + error_code="invalid_token", + message="Invalid token. 
Please log in.", + status_code=401, + ) + + return user_model + + +def _get_user_from_decoded_internal_token(decoded_token: dict) -> UserModel | None: + sub = decoded_token["sub"] + parts = sub.split("::") + service = parts[0].split(":")[1] + service_id = parts[1].split(":")[1] + user: UserModel = ( + UserModel.query.filter(UserModel.service == service).filter(UserModel.service_id == service_id).first() + ) + if user: + return user + user = UserService.create_user(service_id, service, service_id) + return user + + +def _get_decoded_token(token: str) -> dict | None: + try: + decoded_token = jwt.decode(token, options={"verify_signature": False}) + except Exception as e: + raise ApiError(error_code="invalid_token", message="Cannot decode token.") from e + else: + if "token_type" in decoded_token or "iss" in decoded_token: + return decoded_token + else: + current_app.logger.error(f"Unknown token type in get_decoded_token: token: {token}") + raise ApiError( + error_code="unknown_token", + message="Unknown token type in get_decoded_token", + ) + + +def _parse_id_token(token: str) -> Any: + """Parse the id token.""" + parts = token.split(".") + if len(parts) != 3: + raise Exception("Incorrect id token format") + + payload = parts[1] + padded = payload + "=" * (4 - len(payload) % 4) + decoded = base64.b64decode(padded) + return json.loads(decoded) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py index 55dd9a7c..4fb5d692 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py @@ -21,6 +21,7 @@ from spiffworkflow_backend.models.permission_assignment import PermissionAssignm from spiffworkflow_backend.models.permission_target import PermissionTargetModel from spiffworkflow_backend.models.principal import MissingPrincipalError from spiffworkflow_backend.models.principal import PrincipalModel +from spiffworkflow_backend.models.service_account import SPIFF_SERVICE_ACCOUNT_AUTH_SERVICE from spiffworkflow_backend.models.task import TaskModel # noqa: F401 from spiffworkflow_backend.models.user import SPIFF_GUEST_USER from spiffworkflow_backend.models.user import UserModel @@ -33,6 +34,7 @@ from spiffworkflow_backend.services.authentication_service import TokenNotProvid from spiffworkflow_backend.services.authentication_service import UserNotLoggedInError from spiffworkflow_backend.services.group_service import GroupService from spiffworkflow_backend.services.user_service import UserService +from sqlalchemy import and_ from sqlalchemy import or_ from sqlalchemy import text @@ -604,6 +606,8 @@ class AuthorizationService: PermissionToAssign(permission="update", target_uri="/authentication/configuration") ) + permissions_to_assign.append(PermissionToAssign(permission="create", target_uri="/service-accounts")) + return permissions_to_assign @classmethod @@ -891,7 +895,15 @@ class AuthorizationService: cls, group_permissions: list[GroupPermissionsDict], group_permissions_only: bool = False ) -> None: """Adds new permission assignments and deletes old ones.""" - initial_permission_assignments = PermissionAssignmentModel.query.all() + initial_permission_assignments = ( + PermissionAssignmentModel.query.outerjoin( + PrincipalModel, + and_(PrincipalModel.id == PermissionAssignmentModel.principal_id, PrincipalModel.user_id.is_not(None)), + ) + .outerjoin(UserModel, 
UserModel.id == PrincipalModel.user_id) + .filter(or_(UserModel.id.is_(None), UserModel.service != SPIFF_SERVICE_ACCOUNT_AUTH_SERVICE)) # type: ignore + .all() + ) initial_user_to_group_assignments = UserGroupAssignmentModel.query.all() group_permissions = group_permissions + cls.parse_permissions_yaml_into_group_info() added_permissions = cls.add_permissions_from_group_permissions( diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 1309abd9..91ae3659 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -403,16 +403,22 @@ class ProcessInstanceProcessor: validate_only: bool = False, script_engine: PythonScriptEngine | None = None, workflow_completed_handler: WorkflowCompletedHandler | None = None, + process_id_to_run: str | None = None, ) -> None: """Create a Workflow Processor based on the serialized information available in the process_instance model.""" self._script_engine = script_engine or self.__class__._default_script_engine self._workflow_completed_handler = workflow_completed_handler self.setup_processor_with_process_instance( - process_instance_model=process_instance_model, validate_only=validate_only + process_instance_model=process_instance_model, + validate_only=validate_only, + process_id_to_run=process_id_to_run, ) def setup_processor_with_process_instance( - self, process_instance_model: ProcessInstanceModel, validate_only: bool = False + self, + process_instance_model: ProcessInstanceModel, + validate_only: bool = False, + process_id_to_run: str | None = None, ) -> None: tld = current_app.config["THREAD_LOCAL_DATA"] tld.process_instance_id = process_instance_model.id @@ -441,7 +447,7 @@ class ProcessInstanceProcessor: bpmn_process_spec, subprocesses, ) = ProcessInstanceProcessor.get_process_model_and_subprocesses( - process_instance_model.process_model_identifier + process_instance_model.process_model_identifier, process_id_to_run=process_id_to_run ) self.process_model_identifier = process_instance_model.process_model_identifier @@ -471,7 +477,9 @@ class ProcessInstanceProcessor: @classmethod def get_process_model_and_subprocesses( - cls, process_model_identifier: str + cls, + process_model_identifier: str, + process_id_to_run: str | None = None, ) -> tuple[BpmnProcessSpec, IdToBpmnProcessSpecMapping]: process_model_info = ProcessModelService.get_process_model(process_model_identifier) if process_model_info is None: @@ -482,7 +490,7 @@ class ProcessInstanceProcessor: ) ) spec_files = FileSystemService.get_files(process_model_info) - return cls.get_spec(spec_files, process_model_info) + return cls.get_spec(spec_files, process_model_info, process_id_to_run=process_id_to_run) @classmethod def get_bpmn_process_instance_from_process_model(cls, process_model_identifier: str) -> BpmnWorkflow: @@ -1303,11 +1311,15 @@ class ProcessInstanceProcessor: @staticmethod def get_spec( - files: list[File], process_model_info: ProcessModelInfo + files: list[File], + process_model_info: ProcessModelInfo, + process_id_to_run: str | None = None, ) -> tuple[BpmnProcessSpec, IdToBpmnProcessSpecMapping]: """Returns a SpiffWorkflow specification for the given process_instance spec, using the files provided.""" parser = ProcessInstanceProcessor.get_parser() + process_id = process_id_to_run or 
process_model_info.primary_process_id + for file in files: data = SpecFileService.get_data(process_model_info, file.name) try: @@ -1322,7 +1334,7 @@ class ProcessInstanceProcessor: error_code="invalid_xml", message=f"'{file.name}' is not a valid xml file." + str(xse), ) from xse - if process_model_info.primary_process_id is None or process_model_info.primary_process_id == "": + if process_id is None or process_id == "": raise ( ApiError( error_code="no_primary_bpmn_error", @@ -1332,10 +1344,10 @@ class ProcessInstanceProcessor: ProcessInstanceProcessor.update_spiff_parser_with_all_process_dependency_files(parser) try: - bpmn_process_spec = parser.get_spec(process_model_info.primary_process_id) + bpmn_process_spec = parser.get_spec(process_id) # returns a dict of {process_id: bpmn_process_spec}, otherwise known as an IdToBpmnProcessSpecMapping - subprocesses = parser.get_subprocess_specs(process_model_info.primary_process_id) + subprocesses = parser.get_subprocess_specs(process_id) except ValidationException as ve: raise ApiError( error_code="process_instance_validation_error", diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/service_account_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/service_account_service.py new file mode 100644 index 00000000..ab6fb38e --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/service_account_service.py @@ -0,0 +1,54 @@ +from spiffworkflow_backend.models.db import db +from spiffworkflow_backend.models.permission_assignment import PermissionAssignmentModel +from spiffworkflow_backend.models.service_account import SPIFF_SERVICE_ACCOUNT_AUTH_SERVICE +from spiffworkflow_backend.models.service_account import SPIFF_SERVICE_ACCOUNT_AUTH_SERVICE_ID_PREFIX +from spiffworkflow_backend.models.service_account import ServiceAccountModel +from spiffworkflow_backend.models.user import UserModel +from spiffworkflow_backend.services.user_service import UserService + + +class ServiceAccountService: + @classmethod + def create_service_account(cls, name: str, service_account_creator: UserModel) -> ServiceAccountModel: + api_key = ServiceAccountModel.generate_api_key() + api_key_hash = ServiceAccountModel.hash_api_key(api_key) + username = ServiceAccountModel.generate_username_for_related_user(name, service_account_creator.id) + service_account_user = UserModel( + username=username, + email=f"{username}@spiff.service.account.example.com", + service=SPIFF_SERVICE_ACCOUNT_AUTH_SERVICE, + service_id=f"{SPIFF_SERVICE_ACCOUNT_AUTH_SERVICE_ID_PREFIX}_{username}", + ) + db.session.add(service_account_user) + service_account = ServiceAccountModel( + name=name, + created_by_user_id=service_account_creator.id, + api_key_hash=api_key_hash, + user=service_account_user, + ) + db.session.add(service_account) + ServiceAccountModel.commit_with_rollback_on_exception() + cls.associated_service_account_with_permissions(service_account_user, service_account_creator) + service_account.api_key = api_key + return service_account + + @classmethod + def associated_service_account_with_permissions( + cls, service_account_user: UserModel, service_account_creator: UserModel + ) -> None: + principal = UserService.create_principal(service_account_user.id) + user_permissions = sorted(UserService.get_permission_targets_for_user(service_account_creator)) + + permission_objects = [] + for user_permission in user_permissions: + permission_objects.append( + PermissionAssignmentModel( + principal_id=principal.id, + 
permission_target_id=user_permission[0], + permission=user_permission[1], + grant_type=user_permission[2], + ) + ) + + db.session.bulk_save_objects(permission_objects) + ServiceAccountModel.commit_with_rollback_on_exception() diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/user_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/user_service.py index 41221722..52624b25 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/user_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/user_service.py @@ -170,3 +170,27 @@ class UserService: human_task_user = HumanTaskUserModel(user_id=user.id, human_task_id=human_task.id) db.session.add(human_task_user) db.session.commit() + + @classmethod + def get_permission_targets_for_user(cls, user: UserModel, check_groups: bool = True) -> set[tuple[str, str, str]]: + unique_permission_assignments = set() + for permission_assignment in user.principal.permission_assignments: + unique_permission_assignments.add( + ( + permission_assignment.permission_target_id, + permission_assignment.permission, + permission_assignment.grant_type, + ) + ) + + if check_groups: + for group in user.groups: + for permission_assignment in group.principal.permission_assignments: + unique_permission_assignments.add( + ( + permission_assignment.permission_target_id, + permission_assignment.permission, + permission_assignment.grant_type, + ) + ) + return unique_permission_assignments diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_authentication.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_authentication.py index f02b1598..97dfcc4e 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_authentication.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_authentication.py @@ -9,6 +9,8 @@ from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.services.authentication_service import AuthenticationService from spiffworkflow_backend.services.authorization_service import AuthorizationService from spiffworkflow_backend.services.authorization_service import GroupPermissionsDict +from spiffworkflow_backend.services.service_account_service import ServiceAccountService +from spiffworkflow_backend.services.user_service import UserService from tests.spiffworkflow_backend.helpers.base_test import BaseTest @@ -87,3 +89,30 @@ class TestAuthentication(BaseTest): assert sorted(group_identifiers) == ["everybody", "group_one"] self.assert_user_has_permission(user, "read", "/v1.0/process-groups/hey") self.assert_user_has_permission(user, "read", "/v1.0/process-groups/hey:yo") + + def test_does_not_remove_permissions_from_service_accounts_on_refresh( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + service_account = ServiceAccountService.create_service_account("sa_api_key", with_super_admin_user) + service_account_permissions_before = sorted( + UserService.get_permission_targets_for_user(service_account.user, check_groups=False) + ) + + # make sure running refresh_permissions doesn't remove the user from the group + group_info: list[GroupPermissionsDict] = [ + { + "users": [], + "name": "group_one", + "permissions": [{"actions": ["create", "read"], "uri": "PG:hey"}], + } + ] + AuthorizationService.refresh_permissions(group_info, group_permissions_only=True) + + service_account_permissions_after = sorted( + 
UserService.get_permission_targets_for_user(service_account.user, check_groups=False) + ) + assert service_account_permissions_before == service_account_permissions_after diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_service_accounts.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_service_accounts.py new file mode 100644 index 00000000..76bf53e4 --- /dev/null +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_service_accounts.py @@ -0,0 +1,48 @@ +import json + +from flask.app import Flask +from flask.testing import FlaskClient +from spiffworkflow_backend.models.user import UserModel +from spiffworkflow_backend.services.service_account_service import ServiceAccountService +from spiffworkflow_backend.services.user_service import UserService + +from tests.spiffworkflow_backend.helpers.base_test import BaseTest + + +class TestServiceAccounts(BaseTest): + def test_can_create_a_service_account( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + api_key_name = "heyhey" + service_account = ServiceAccountService.create_service_account(api_key_name, with_super_admin_user) + + assert service_account is not None + assert service_account.created_by_user_id == with_super_admin_user.id + assert service_account.name == api_key_name + assert service_account.api_key is not None + + # ci and local set different permissions for the admin user so figure out dynamically + admin_permissions = sorted(UserService.get_permission_targets_for_user(with_super_admin_user)) + service_account_permissions = sorted( + UserService.get_permission_targets_for_user(service_account.user, check_groups=False) + ) + assert admin_permissions == service_account_permissions + + # ensure service account can actually access the api + post_body = { + "key": "secret_key", + "value": "hey_value", + } + response = client.post( + "/v1.0/secrets", + content_type="application/json", + headers={"X-API-KEY": service_account.api_key}, + data=json.dumps(post_body), + ) + assert response.status_code == 201 + assert response.json is not None + assert response.json["key"] == post_body["key"] diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_authorization_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_authorization_service.py index e77a59d9..8d76d2cf 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_authorization_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_authorization_service.py @@ -515,5 +515,6 @@ class TestAuthorizationService(BaseTest): ("/secrets/*", "delete"), ("/secrets/*", "read"), ("/secrets/*", "update"), + ("/service-accounts", "create"), ] ) diff --git a/spiffworkflow-frontend/src/App.tsx b/spiffworkflow-frontend/src/App.tsx index 92f140fe..84b3f773 100644 --- a/spiffworkflow-frontend/src/App.tsx +++ b/spiffworkflow-frontend/src/App.tsx @@ -1,22 +1,11 @@ -// @ts-ignore -import { Content } from '@carbon/react'; - -import { BrowserRouter, Routes, Route } from 'react-router-dom'; +import { BrowserRouter } from 'react-router-dom'; import { defineAbility } from '@casl/ability'; import React from 'react'; -import NavigationBar from './components/NavigationBar'; - -import HomePageRoutes from './routes/HomePageRoutes'; -import About from './routes/About'; -import ErrorBoundary from './components/ErrorBoundary'; -import AdminRoutes from './routes/AdminRoutes'; import { 
AbilityContext } from './contexts/Can'; import UserService from './services/UserService'; import APIErrorProvider from './contexts/APIErrorContext'; -import ScrollToTop from './components/ScrollToTop'; -import EditorRoutes from './routes/EditorRoutes'; -import Extension from './routes/Extension'; +import ContainerForExtensions from './ContainerForExtensions'; export default function App() { if (!UserService.isLoggedIn()) { @@ -26,34 +15,13 @@ export default function App() { const ability = defineAbility(() => {}); - let contentClassName = 'main-site-body-centered'; - if (window.location.pathname.startsWith('/editor/')) { - contentClassName = 'no-center-stuff'; - } - return (
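A note on how the API-key pieces of this changeset fit together: `ServiceAccountModel.generate_api_key` returns a random UUID4 string, only its SHA-256 hex digest is stored in `api_key_hash`, and the new `_get_user_model_from_api_key` helper in `routes/user.py` re-hashes the incoming `X-API-KEY` header value to look up the matching service account and its backing user. A minimal standalone sketch of that hash-and-look-up round trip (a plain dict stands in for the `service_account` table; this is illustrative, not the backend's actual storage code):

```python
import uuid
from hashlib import sha256


def generate_api_key() -> str:
    # same scheme as ServiceAccountModel.generate_api_key: a random UUID4 string
    return str(uuid.uuid4())


def hash_api_key(unencrypted_api_key: str) -> str:
    # mirrors ServiceAccountModel.hash_api_key: only this digest is ever persisted
    return sha256(unencrypted_api_key.encode("utf8")).hexdigest()


# stand-in for the service_account table, keyed by api_key_hash (illustrative only)
accounts_by_hash: dict[str, str] = {}

api_key = generate_api_key()  # shown to the caller exactly once, at creation time
accounts_by_hash[hash_api_key(api_key)] = "service_account_user"

# what verify_token ultimately does with an X-API-KEY header value
incoming_key = api_key
resolved_user = accounts_by_hash.get(hash_api_key(incoming_key))
assert resolved_user == "service_account_user"
```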
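For API consumers, the integration test in `test_service_accounts.py` shows the intended usage: send the one-time `api_key` in an `X-API-KEY` header and the backend resolves it to the service account's user before the permission check runs. A hedged sketch of the same call from an external client (the `/v1.0/secrets` endpoint and payload come from that test; the `requests` library, the base URL, and the placeholder key value are assumptions, and the endpoint that returns the key in the first place is not part of this diff):

```python
import requests  # assumption: any HTTP client works; the test uses Flask's test client instead

BASE_URL = "http://localhost:7000/v1.0"  # assumed local backend URL
API_KEY = "paste-the-one-time-api_key-here"  # only returned when the service account is created

# create a secret as the service account, authenticating via X-API-KEY instead of a bearer token
response = requests.post(
    f"{BASE_URL}/secrets",
    json={"key": "secret_key", "value": "hey_value"},
    headers={"X-API-KEY": API_KEY},
    timeout=30,
)
response.raise_for_status()  # the test expects a 201 with the created secret echoed back
print(response.json()["key"])
```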
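On the permissions side, `ServiceAccountService` copies the creator's effective permission targets, the `(permission_target_id, permission, grant_type)` tuples that `UserService.get_permission_targets_for_user` collects from the user's own principal and, optionally, from group principals, onto the new service-account principal, and `refresh_permissions` now excludes principals whose user has `service == "spiff_service_account"` so a permissions refresh cannot strip that copied snapshot. A small illustrative sketch of the tuple-set copy (plain data only; the ids and grant-type values are made up, no database involved):

```python
# illustrative tuples shaped like (permission_target_id, permission, grant_type)
creator_direct = {(1, "create", "permit"), (2, "read", "permit")}
creator_via_groups = {(2, "read", "permit"), (3, "start", "permit")}

# what get_permission_targets_for_user(check_groups=True) effectively returns: a de-duplicated union
effective_permissions = creator_direct | creator_via_groups

# associated_service_account_with_permissions then writes one PermissionAssignmentModel per tuple
assignments = [
    {"principal_id": 99, "permission_target_id": t, "permission": p, "grant_type": g}
    for t, p, g in sorted(effective_permissions)
]
assert len(assignments) == 3
```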
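Separately, the extensions change threads an optional `process_id_to_run` from the request body's `ui_schema_action` through to `ProcessInstanceProcessor.get_spec`, so an extension can run a process other than the model's primary one. Based only on the keys `_run_extension` reads, a request body would look roughly like this (the overall shape is inferred from the controller, and the process id is a made-up example):

```python
# assumed request-body shape: only keys that _run_extension actually reads are shown
body = {
    "ui_schema_action": {
        "persistence_level": "none",  # default behavior; no process instance is persisted
        "process_id_to_run": "my_secondary_process",  # hypothetical BPMN process id within the model
    },
    # optional; the controller checks for this key before doing engine steps
    "extension_input": {"some_var": "some_value"},
}
```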